From b46dffde1d83cdda2af421c1a904b3216e64dc39 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?K=C3=A9vin=20Chalet?= Date: Sat, 7 Dec 2024 15:24:34 +0100 Subject: [PATCH] Upload the first bits of the initial prototype --- .editorconfig | 169 ++ .github/FUNDING.yml | 2 + .github/workflows/build.yml | 135 ++ Build.cmd | 3 + Directory.Build.props | 78 + Directory.Build.targets | 34 + Directory.Packages.props | 47 + LICENSE.md | 202 ++ NuGet.config | 24 + OpenNetty.sln | 75 + build.sh | 16 + eng/Build.props | 7 + eng/Signing.props | 13 + eng/Tools.props | 7 + eng/Version.Details.xml | 19 + eng/Versions.props | 17 + eng/_._ | 1 + .../build-configuration.json | 4 + eng/common/CIBuild.cmd | 2 + eng/common/PSScriptAnalyzerSettings.psd1 | 11 + eng/common/README.md | 28 + eng/common/SetupNugetSources.ps1 | 167 ++ eng/common/SetupNugetSources.sh | 171 ++ eng/common/build.ps1 | 166 ++ eng/common/build.sh | 247 +++ eng/common/cibuild.sh | 16 + eng/common/cross/arm/sources.list.bionic | 11 + eng/common/cross/arm/sources.list.focal | 11 + eng/common/cross/arm/sources.list.jammy | 11 + eng/common/cross/arm/sources.list.jessie | 3 + eng/common/cross/arm/sources.list.trusty | 11 + eng/common/cross/arm/sources.list.xenial | 11 + eng/common/cross/arm/sources.list.zesty | 11 + eng/common/cross/arm/tizen-build-rootfs.sh | 35 + eng/common/cross/arm/tizen-fetch.sh | 170 ++ eng/common/cross/arm/tizen/tizen.patch | 9 + eng/common/cross/arm/trusty-lttng-2.4.patch | 71 + eng/common/cross/arm/trusty.patch | 97 + eng/common/cross/arm64/sources.list.bionic | 11 + eng/common/cross/arm64/sources.list.buster | 11 + eng/common/cross/arm64/sources.list.focal | 11 + eng/common/cross/arm64/sources.list.jammy | 11 + eng/common/cross/arm64/sources.list.stretch | 12 + eng/common/cross/arm64/sources.list.trusty | 11 + eng/common/cross/arm64/sources.list.xenial | 11 + eng/common/cross/arm64/sources.list.zesty | 11 + eng/common/cross/arm64/tizen-build-rootfs.sh | 35 + eng/common/cross/arm64/tizen-fetch.sh | 170 ++ eng/common/cross/arm64/tizen/tizen.patch | 9 + eng/common/cross/armel/armel.jessie.patch | 43 + eng/common/cross/armel/sources.list.jessie | 3 + eng/common/cross/armel/tizen-build-rootfs.sh | 35 + eng/common/cross/armel/tizen-fetch.sh | 170 ++ eng/common/cross/armel/tizen/tizen-dotnet.ks | 50 + eng/common/cross/armel/tizen/tizen.patch | 9 + eng/common/cross/armv6/sources.list.buster | 2 + eng/common/cross/build-android-rootfs.sh | 131 ++ eng/common/cross/build-rootfs.sh | 648 ++++++ eng/common/cross/ppc64le/sources.list.bionic | 11 + eng/common/cross/riscv64/sources.list.sid | 1 + eng/common/cross/s390x/sources.list.bionic | 11 + eng/common/cross/tizen-build-rootfs.sh | 61 + eng/common/cross/tizen-fetch.sh | 172 ++ eng/common/cross/toolchain.cmake | 377 ++++ eng/common/cross/x86/sources.list.bionic | 11 + eng/common/cross/x86/sources.list.focal | 11 + eng/common/cross/x86/sources.list.jammy | 11 + eng/common/cross/x86/sources.list.trusty | 11 + eng/common/cross/x86/sources.list.xenial | 11 + eng/common/cross/x86/tizen-build-rootfs.sh | 35 + eng/common/cross/x86/tizen-fetch.sh | 170 ++ eng/common/cross/x86/tizen/tizen.patch | 9 + eng/common/darc-init.ps1 | 47 + eng/common/darc-init.sh | 82 + .../dotnet-install-scripts/dotnet-install.ps1 | 774 +++++++ .../dotnet-install-scripts/dotnet-install.sh | 1133 +++++++++++ eng/common/dotnet-install.cmd | 2 + eng/common/dotnet-install.ps1 | 28 + eng/common/dotnet-install.sh | 91 + eng/common/enable-cross-org-publishing.ps1 | 13 + eng/common/generate-graph-files.ps1 | 86 + 
eng/common/generate-locproject.ps1 | 189 ++ eng/common/generate-sbom-prep.ps1 | 21 + eng/common/generate-sbom-prep.sh | 34 + eng/common/helixpublish.proj | 26 + eng/common/init-tools-native.cmd | 3 + eng/common/init-tools-native.ps1 | 203 ++ eng/common/init-tools-native.sh | 238 +++ eng/common/internal-feed-operations.ps1 | 132 ++ eng/common/internal-feed-operations.sh | 141 ++ eng/common/internal/Directory.Build.props | 4 + eng/common/internal/NuGet.config | 7 + eng/common/internal/Tools.csproj | 30 + eng/common/loc/P22DotNetHtmlLocalization.lss | 29 + eng/common/msbuild.ps1 | 28 + eng/common/msbuild.sh | 58 + eng/common/native/CommonLibrary.psm1 | 400 ++++ eng/common/native/common-library.sh | 172 ++ eng/common/native/find-native-compiler.sh | 121 ++ eng/common/native/init-compiler.sh | 137 ++ eng/common/native/init-distro-rid.sh | 130 ++ eng/common/native/init-os-and-arch.sh | 80 + eng/common/native/install-cmake-test.sh | 117 ++ eng/common/native/install-cmake.sh | 117 ++ eng/common/native/install-tool.ps1 | 132 ++ eng/common/performance/blazor_perf.proj | 30 + eng/common/performance/crossgen_perf.proj | 69 + eng/common/performance/microbenchmarks.proj | 144 ++ eng/common/performance/perfhelixpublish.proj | 121 ++ eng/common/performance/performance-setup.ps1 | 147 ++ eng/common/performance/performance-setup.sh | 289 +++ eng/common/pipeline-logging-functions.ps1 | 260 +++ eng/common/pipeline-logging-functions.sh | 206 ++ .../post-build/add-build-to-channel.ps1 | 48 + .../post-build/check-channel-consistency.ps1 | 40 + eng/common/post-build/nuget-validation.ps1 | 24 + eng/common/post-build/post-build-utils.ps1 | 91 + eng/common/post-build/publish-using-darc.ps1 | 54 + .../post-build/sourcelink-validation.ps1 | 319 +++ eng/common/post-build/symbols-validation.ps1 | 339 ++++ .../post-build/trigger-subscriptions.ps1 | 64 + eng/common/retain-build.ps1 | 45 + eng/common/sdk-task.ps1 | 97 + eng/common/sdl/NuGet.config | 18 + eng/common/sdl/configure-sdl-tool.ps1 | 130 ++ eng/common/sdl/execute-all-sdl-tools.ps1 | 167 ++ eng/common/sdl/extract-artifact-archives.ps1 | 63 + eng/common/sdl/extract-artifact-packages.ps1 | 82 + eng/common/sdl/init-sdl.ps1 | 55 + eng/common/sdl/packages.config | 4 + eng/common/sdl/push-gdn.ps1 | 69 + eng/common/sdl/run-sdl.ps1 | 49 + eng/common/sdl/sdl.ps1 | 38 + eng/common/sdl/trim-assets-version.ps1 | 75 + eng/common/templates/job/execute-sdl.yml | 139 ++ .../templates/job/generate-graph-files.yml | 48 + eng/common/templates/job/job.yml | 255 +++ eng/common/templates/job/onelocbuild.yml | 109 + eng/common/templates/job/performance.yml | 95 + .../templates/job/publish-build-assets.yml | 151 ++ eng/common/templates/job/source-build.yml | 66 + .../templates/job/source-index-stage1.yml | 67 + eng/common/templates/jobs/codeql-build.yml | 31 + eng/common/templates/jobs/jobs.yml | 97 + eng/common/templates/jobs/source-build.yml | 46 + eng/common/templates/phases/base.yml | 130 ++ .../templates/phases/publish-build-assets.yml | 52 + .../channels/generic-internal-channel.yml | 190 ++ .../channels/generic-public-channel.yml | 192 ++ .../templates/post-build/common-variables.yml | 22 + .../templates/post-build/post-build.yml | 281 +++ .../post-build/setup-maestro-vars.yml | 70 + .../post-build/trigger-subscription.yml | 13 + .../templates/steps/add-build-to-channel.yml | 13 + eng/common/templates/steps/build-reason.yml | 12 + .../templates/steps/component-governance.yml | 13 + eng/common/templates/steps/execute-codeql.yml | 32 + eng/common/templates/steps/execute-sdl.yml | 88 
+ eng/common/templates/steps/generate-sbom.yml | 48 + .../templates/steps/perf-send-to-helix.yml | 50 + eng/common/templates/steps/publish-logs.yml | 23 + eng/common/templates/steps/retain-build.yml | 28 + eng/common/templates/steps/run-on-unix.yml | 7 + eng/common/templates/steps/run-on-windows.yml | 7 + .../steps/run-script-ifequalelse.yml | 33 + eng/common/templates/steps/send-to-helix.yml | 91 + eng/common/templates/steps/source-build.yml | 129 ++ eng/common/templates/steps/telemetry-end.yml | 102 + .../templates/steps/telemetry-start.yml | 241 +++ .../templates/variables/pool-providers.yml | 57 + .../templates/variables/sdl-variables.yml | 7 + eng/common/tools.ps1 | 949 +++++++++ eng/common/tools.sh | 587 ++++++ eng/key.snk | Bin 0 -> 596 bytes global.json | 16 + package-icon.png | Bin 0 -> 9412 bytes src/OpenNetty.Daemon/OpenNetty.Daemon.csproj | 28 + src/OpenNetty.Daemon/Program.cs | 19 + src/OpenNetty.Daemon/appsettings.json | 8 + src/OpenNetty.Mqtt/IOpenNettyMqttWorker.cs | 23 + src/OpenNetty.Mqtt/OpenNetty.Mqtt.csproj | 21 + src/OpenNetty.Mqtt/OpenNettyMqttAttributes.cs | 72 + src/OpenNetty.Mqtt/OpenNettyMqttBuilder.cs | 158 ++ .../OpenNettyMqttConfiguration.cs | 19 + src/OpenNetty.Mqtt/OpenNettyMqttExtensions.cs | 60 + .../OpenNettyMqttHostedService.cs | 367 ++++ src/OpenNetty.Mqtt/OpenNettyMqttOperation.cs | 17 + src/OpenNetty.Mqtt/OpenNettyMqttOptions.cs | 28 + src/OpenNetty.Mqtt/OpenNettyMqttWorker.cs | 370 ++++ src/OpenNetty/IOpenNettyHandler.cs | 21 + src/OpenNetty/IOpenNettyPipeline.cs | 29 + src/OpenNetty/IOpenNettyService.cs | 235 +++ src/OpenNetty/IOpenNettyWorker.cs | 25 + src/OpenNetty/OpenNetty.csproj | 27 + src/OpenNetty/OpenNettyAddress.cs | 673 ++++++ src/OpenNetty/OpenNettyAddressType.cs | 62 + src/OpenNetty/OpenNettyBrand.cs | 17 + src/OpenNetty/OpenNettyBuilder.cs | 470 +++++ src/OpenNetty/OpenNettyCapabilities.cs | 167 ++ src/OpenNetty/OpenNettyCapability.cs | 54 + src/OpenNetty/OpenNettyCategories.cs | 37 + src/OpenNetty/OpenNettyCategory.cs | 212 ++ src/OpenNetty/OpenNettyCommand.cs | 233 +++ src/OpenNetty/OpenNettyCommands.cs | 221 ++ src/OpenNetty/OpenNettyConfiguration.cs | 29 + src/OpenNetty/OpenNettyConnection.cs | 306 +++ src/OpenNetty/OpenNettyConnectionType.cs | 17 + src/OpenNetty/OpenNettyConstants.cs | 39 + src/OpenNetty/OpenNettyController.cs | 1449 +++++++++++++ src/OpenNetty/OpenNettyCoordinator.cs | 1806 +++++++++++++++++ src/OpenNetty/OpenNettyDevice.cs | 83 + src/OpenNetty/OpenNettyDeviceDefinition.cs | 152 ++ src/OpenNetty/OpenNettyDevices.cs | 180 ++ src/OpenNetty/OpenNettyDevices.xml | 599 ++++++ src/OpenNetty/OpenNettyDimension.cs | 226 +++ src/OpenNetty/OpenNettyDimensions.cs | 141 ++ src/OpenNetty/OpenNettyEndpoint.cs | 240 +++ src/OpenNetty/OpenNettyErrorCode.cs | 77 + src/OpenNetty/OpenNettyEvents.cs | 366 ++++ src/OpenNetty/OpenNettyException.cs | 31 + src/OpenNetty/OpenNettyExtensions.cs | 58 + src/OpenNetty/OpenNettyField.cs | 197 ++ src/OpenNetty/OpenNettyFrame.cs | 194 ++ src/OpenNetty/OpenNettyFrames.cs | 22 + src/OpenNetty/OpenNettyGateway.cs | 230 +++ src/OpenNetty/OpenNettyGatewayOptions.cs | 267 +++ src/OpenNetty/OpenNettyHelpers.cs | 317 +++ src/OpenNetty/OpenNettyHostedService.cs | 127 ++ src/OpenNetty/OpenNettyIdentity.cs | 50 + src/OpenNetty/OpenNettyLogger.cs | 229 +++ src/OpenNetty/OpenNettyManager.cs | 299 +++ src/OpenNetty/OpenNettyMedia.cs | 27 + src/OpenNetty/OpenNettyMessage.cs | 749 +++++++ src/OpenNetty/OpenNettyMessageType.cs | 52 + src/OpenNetty/OpenNettyMode.cs | 22 + src/OpenNetty/OpenNettyModels.cs | 458 
+++++ src/OpenNetty/OpenNettyNotification.cs | 12 + src/OpenNetty/OpenNettyNotifications.cs | 170 ++ src/OpenNetty/OpenNettyOptions.cs | 17 + src/OpenNetty/OpenNettyParameter.cs | 93 + src/OpenNetty/OpenNettyPipe.cs | 213 ++ src/OpenNetty/OpenNettyPipeline.cs | 92 + src/OpenNetty/OpenNettyProtocol.cs | 22 + src/OpenNetty/OpenNettyResources.resx | 453 +++++ src/OpenNetty/OpenNettyScenario.cs | 53 + src/OpenNetty/OpenNettyService.cs | 805 ++++++++ src/OpenNetty/OpenNettySession.cs | 675 ++++++ src/OpenNetty/OpenNettySessionType.cs | 22 + src/OpenNetty/OpenNettySetting.cs | 54 + src/OpenNetty/OpenNettySettings.cs | 37 + src/OpenNetty/OpenNettyTransaction.cs | 58 + src/OpenNetty/OpenNettyTransmissionOptions.cs | 34 + src/OpenNetty/OpenNettyUnit.cs | 81 + src/OpenNetty/OpenNettyUnitDefinition.cs | 98 + src/OpenNetty/OpenNettyWorker.cs | 318 +++ 255 files changed, 32956 insertions(+) create mode 100644 .editorconfig create mode 100644 .github/FUNDING.yml create mode 100644 .github/workflows/build.yml create mode 100644 Build.cmd create mode 100644 Directory.Build.props create mode 100644 Directory.Build.targets create mode 100644 Directory.Packages.props create mode 100644 LICENSE.md create mode 100644 NuGet.config create mode 100644 OpenNetty.sln create mode 100644 build.sh create mode 100644 eng/Build.props create mode 100644 eng/Signing.props create mode 100644 eng/Tools.props create mode 100644 eng/Version.Details.xml create mode 100644 eng/Versions.props create mode 100644 eng/_._ create mode 100644 eng/common/BuildConfiguration/build-configuration.json create mode 100644 eng/common/CIBuild.cmd create mode 100644 eng/common/PSScriptAnalyzerSettings.psd1 create mode 100644 eng/common/README.md create mode 100644 eng/common/SetupNugetSources.ps1 create mode 100644 eng/common/SetupNugetSources.sh create mode 100644 eng/common/build.ps1 create mode 100644 eng/common/build.sh create mode 100644 eng/common/cibuild.sh create mode 100644 eng/common/cross/arm/sources.list.bionic create mode 100644 eng/common/cross/arm/sources.list.focal create mode 100644 eng/common/cross/arm/sources.list.jammy create mode 100644 eng/common/cross/arm/sources.list.jessie create mode 100644 eng/common/cross/arm/sources.list.trusty create mode 100644 eng/common/cross/arm/sources.list.xenial create mode 100644 eng/common/cross/arm/sources.list.zesty create mode 100644 eng/common/cross/arm/tizen-build-rootfs.sh create mode 100644 eng/common/cross/arm/tizen-fetch.sh create mode 100644 eng/common/cross/arm/tizen/tizen.patch create mode 100644 eng/common/cross/arm/trusty-lttng-2.4.patch create mode 100644 eng/common/cross/arm/trusty.patch create mode 100644 eng/common/cross/arm64/sources.list.bionic create mode 100644 eng/common/cross/arm64/sources.list.buster create mode 100644 eng/common/cross/arm64/sources.list.focal create mode 100644 eng/common/cross/arm64/sources.list.jammy create mode 100644 eng/common/cross/arm64/sources.list.stretch create mode 100644 eng/common/cross/arm64/sources.list.trusty create mode 100644 eng/common/cross/arm64/sources.list.xenial create mode 100644 eng/common/cross/arm64/sources.list.zesty create mode 100644 eng/common/cross/arm64/tizen-build-rootfs.sh create mode 100644 eng/common/cross/arm64/tizen-fetch.sh create mode 100644 eng/common/cross/arm64/tizen/tizen.patch create mode 100644 eng/common/cross/armel/armel.jessie.patch create mode 100644 eng/common/cross/armel/sources.list.jessie create mode 100644 eng/common/cross/armel/tizen-build-rootfs.sh create mode 100644 
eng/common/cross/armel/tizen-fetch.sh create mode 100644 eng/common/cross/armel/tizen/tizen-dotnet.ks create mode 100644 eng/common/cross/armel/tizen/tizen.patch create mode 100644 eng/common/cross/armv6/sources.list.buster create mode 100644 eng/common/cross/build-android-rootfs.sh create mode 100644 eng/common/cross/build-rootfs.sh create mode 100644 eng/common/cross/ppc64le/sources.list.bionic create mode 100644 eng/common/cross/riscv64/sources.list.sid create mode 100644 eng/common/cross/s390x/sources.list.bionic create mode 100644 eng/common/cross/tizen-build-rootfs.sh create mode 100644 eng/common/cross/tizen-fetch.sh create mode 100644 eng/common/cross/toolchain.cmake create mode 100644 eng/common/cross/x86/sources.list.bionic create mode 100644 eng/common/cross/x86/sources.list.focal create mode 100644 eng/common/cross/x86/sources.list.jammy create mode 100644 eng/common/cross/x86/sources.list.trusty create mode 100644 eng/common/cross/x86/sources.list.xenial create mode 100644 eng/common/cross/x86/tizen-build-rootfs.sh create mode 100644 eng/common/cross/x86/tizen-fetch.sh create mode 100644 eng/common/cross/x86/tizen/tizen.patch create mode 100644 eng/common/darc-init.ps1 create mode 100644 eng/common/darc-init.sh create mode 100644 eng/common/dotnet-install-scripts/dotnet-install.ps1 create mode 100644 eng/common/dotnet-install-scripts/dotnet-install.sh create mode 100644 eng/common/dotnet-install.cmd create mode 100644 eng/common/dotnet-install.ps1 create mode 100644 eng/common/dotnet-install.sh create mode 100644 eng/common/enable-cross-org-publishing.ps1 create mode 100644 eng/common/generate-graph-files.ps1 create mode 100644 eng/common/generate-locproject.ps1 create mode 100644 eng/common/generate-sbom-prep.ps1 create mode 100644 eng/common/generate-sbom-prep.sh create mode 100644 eng/common/helixpublish.proj create mode 100644 eng/common/init-tools-native.cmd create mode 100644 eng/common/init-tools-native.ps1 create mode 100644 eng/common/init-tools-native.sh create mode 100644 eng/common/internal-feed-operations.ps1 create mode 100644 eng/common/internal-feed-operations.sh create mode 100644 eng/common/internal/Directory.Build.props create mode 100644 eng/common/internal/NuGet.config create mode 100644 eng/common/internal/Tools.csproj create mode 100644 eng/common/loc/P22DotNetHtmlLocalization.lss create mode 100644 eng/common/msbuild.ps1 create mode 100644 eng/common/msbuild.sh create mode 100644 eng/common/native/CommonLibrary.psm1 create mode 100644 eng/common/native/common-library.sh create mode 100644 eng/common/native/find-native-compiler.sh create mode 100644 eng/common/native/init-compiler.sh create mode 100644 eng/common/native/init-distro-rid.sh create mode 100644 eng/common/native/init-os-and-arch.sh create mode 100644 eng/common/native/install-cmake-test.sh create mode 100644 eng/common/native/install-cmake.sh create mode 100644 eng/common/native/install-tool.ps1 create mode 100644 eng/common/performance/blazor_perf.proj create mode 100644 eng/common/performance/crossgen_perf.proj create mode 100644 eng/common/performance/microbenchmarks.proj create mode 100644 eng/common/performance/perfhelixpublish.proj create mode 100644 eng/common/performance/performance-setup.ps1 create mode 100644 eng/common/performance/performance-setup.sh create mode 100644 eng/common/pipeline-logging-functions.ps1 create mode 100644 eng/common/pipeline-logging-functions.sh create mode 100644 eng/common/post-build/add-build-to-channel.ps1 create mode 100644 
eng/common/post-build/check-channel-consistency.ps1 create mode 100644 eng/common/post-build/nuget-validation.ps1 create mode 100644 eng/common/post-build/post-build-utils.ps1 create mode 100644 eng/common/post-build/publish-using-darc.ps1 create mode 100644 eng/common/post-build/sourcelink-validation.ps1 create mode 100644 eng/common/post-build/symbols-validation.ps1 create mode 100644 eng/common/post-build/trigger-subscriptions.ps1 create mode 100644 eng/common/retain-build.ps1 create mode 100644 eng/common/sdk-task.ps1 create mode 100644 eng/common/sdl/NuGet.config create mode 100644 eng/common/sdl/configure-sdl-tool.ps1 create mode 100644 eng/common/sdl/execute-all-sdl-tools.ps1 create mode 100644 eng/common/sdl/extract-artifact-archives.ps1 create mode 100644 eng/common/sdl/extract-artifact-packages.ps1 create mode 100644 eng/common/sdl/init-sdl.ps1 create mode 100644 eng/common/sdl/packages.config create mode 100644 eng/common/sdl/push-gdn.ps1 create mode 100644 eng/common/sdl/run-sdl.ps1 create mode 100644 eng/common/sdl/sdl.ps1 create mode 100644 eng/common/sdl/trim-assets-version.ps1 create mode 100644 eng/common/templates/job/execute-sdl.yml create mode 100644 eng/common/templates/job/generate-graph-files.yml create mode 100644 eng/common/templates/job/job.yml create mode 100644 eng/common/templates/job/onelocbuild.yml create mode 100644 eng/common/templates/job/performance.yml create mode 100644 eng/common/templates/job/publish-build-assets.yml create mode 100644 eng/common/templates/job/source-build.yml create mode 100644 eng/common/templates/job/source-index-stage1.yml create mode 100644 eng/common/templates/jobs/codeql-build.yml create mode 100644 eng/common/templates/jobs/jobs.yml create mode 100644 eng/common/templates/jobs/source-build.yml create mode 100644 eng/common/templates/phases/base.yml create mode 100644 eng/common/templates/phases/publish-build-assets.yml create mode 100644 eng/common/templates/post-build/channels/generic-internal-channel.yml create mode 100644 eng/common/templates/post-build/channels/generic-public-channel.yml create mode 100644 eng/common/templates/post-build/common-variables.yml create mode 100644 eng/common/templates/post-build/post-build.yml create mode 100644 eng/common/templates/post-build/setup-maestro-vars.yml create mode 100644 eng/common/templates/post-build/trigger-subscription.yml create mode 100644 eng/common/templates/steps/add-build-to-channel.yml create mode 100644 eng/common/templates/steps/build-reason.yml create mode 100644 eng/common/templates/steps/component-governance.yml create mode 100644 eng/common/templates/steps/execute-codeql.yml create mode 100644 eng/common/templates/steps/execute-sdl.yml create mode 100644 eng/common/templates/steps/generate-sbom.yml create mode 100644 eng/common/templates/steps/perf-send-to-helix.yml create mode 100644 eng/common/templates/steps/publish-logs.yml create mode 100644 eng/common/templates/steps/retain-build.yml create mode 100644 eng/common/templates/steps/run-on-unix.yml create mode 100644 eng/common/templates/steps/run-on-windows.yml create mode 100644 eng/common/templates/steps/run-script-ifequalelse.yml create mode 100644 eng/common/templates/steps/send-to-helix.yml create mode 100644 eng/common/templates/steps/source-build.yml create mode 100644 eng/common/templates/steps/telemetry-end.yml create mode 100644 eng/common/templates/steps/telemetry-start.yml create mode 100644 eng/common/templates/variables/pool-providers.yml create mode 100644 
eng/common/templates/variables/sdl-variables.yml create mode 100644 eng/common/tools.ps1 create mode 100644 eng/common/tools.sh create mode 100644 eng/key.snk create mode 100644 global.json create mode 100644 package-icon.png create mode 100644 src/OpenNetty.Daemon/OpenNetty.Daemon.csproj create mode 100644 src/OpenNetty.Daemon/Program.cs create mode 100644 src/OpenNetty.Daemon/appsettings.json create mode 100644 src/OpenNetty.Mqtt/IOpenNettyMqttWorker.cs create mode 100644 src/OpenNetty.Mqtt/OpenNetty.Mqtt.csproj create mode 100644 src/OpenNetty.Mqtt/OpenNettyMqttAttributes.cs create mode 100644 src/OpenNetty.Mqtt/OpenNettyMqttBuilder.cs create mode 100644 src/OpenNetty.Mqtt/OpenNettyMqttConfiguration.cs create mode 100644 src/OpenNetty.Mqtt/OpenNettyMqttExtensions.cs create mode 100644 src/OpenNetty.Mqtt/OpenNettyMqttHostedService.cs create mode 100644 src/OpenNetty.Mqtt/OpenNettyMqttOperation.cs create mode 100644 src/OpenNetty.Mqtt/OpenNettyMqttOptions.cs create mode 100644 src/OpenNetty.Mqtt/OpenNettyMqttWorker.cs create mode 100644 src/OpenNetty/IOpenNettyHandler.cs create mode 100644 src/OpenNetty/IOpenNettyPipeline.cs create mode 100644 src/OpenNetty/IOpenNettyService.cs create mode 100644 src/OpenNetty/IOpenNettyWorker.cs create mode 100644 src/OpenNetty/OpenNetty.csproj create mode 100644 src/OpenNetty/OpenNettyAddress.cs create mode 100644 src/OpenNetty/OpenNettyAddressType.cs create mode 100644 src/OpenNetty/OpenNettyBrand.cs create mode 100644 src/OpenNetty/OpenNettyBuilder.cs create mode 100644 src/OpenNetty/OpenNettyCapabilities.cs create mode 100644 src/OpenNetty/OpenNettyCapability.cs create mode 100644 src/OpenNetty/OpenNettyCategories.cs create mode 100644 src/OpenNetty/OpenNettyCategory.cs create mode 100644 src/OpenNetty/OpenNettyCommand.cs create mode 100644 src/OpenNetty/OpenNettyCommands.cs create mode 100644 src/OpenNetty/OpenNettyConfiguration.cs create mode 100644 src/OpenNetty/OpenNettyConnection.cs create mode 100644 src/OpenNetty/OpenNettyConnectionType.cs create mode 100644 src/OpenNetty/OpenNettyConstants.cs create mode 100644 src/OpenNetty/OpenNettyController.cs create mode 100644 src/OpenNetty/OpenNettyCoordinator.cs create mode 100644 src/OpenNetty/OpenNettyDevice.cs create mode 100644 src/OpenNetty/OpenNettyDeviceDefinition.cs create mode 100644 src/OpenNetty/OpenNettyDevices.cs create mode 100644 src/OpenNetty/OpenNettyDevices.xml create mode 100644 src/OpenNetty/OpenNettyDimension.cs create mode 100644 src/OpenNetty/OpenNettyDimensions.cs create mode 100644 src/OpenNetty/OpenNettyEndpoint.cs create mode 100644 src/OpenNetty/OpenNettyErrorCode.cs create mode 100644 src/OpenNetty/OpenNettyEvents.cs create mode 100644 src/OpenNetty/OpenNettyException.cs create mode 100644 src/OpenNetty/OpenNettyExtensions.cs create mode 100644 src/OpenNetty/OpenNettyField.cs create mode 100644 src/OpenNetty/OpenNettyFrame.cs create mode 100644 src/OpenNetty/OpenNettyFrames.cs create mode 100644 src/OpenNetty/OpenNettyGateway.cs create mode 100644 src/OpenNetty/OpenNettyGatewayOptions.cs create mode 100644 src/OpenNetty/OpenNettyHelpers.cs create mode 100644 src/OpenNetty/OpenNettyHostedService.cs create mode 100644 src/OpenNetty/OpenNettyIdentity.cs create mode 100644 src/OpenNetty/OpenNettyLogger.cs create mode 100644 src/OpenNetty/OpenNettyManager.cs create mode 100644 src/OpenNetty/OpenNettyMedia.cs create mode 100644 src/OpenNetty/OpenNettyMessage.cs create mode 100644 src/OpenNetty/OpenNettyMessageType.cs create mode 100644 src/OpenNetty/OpenNettyMode.cs create mode 
100644 src/OpenNetty/OpenNettyModels.cs create mode 100644 src/OpenNetty/OpenNettyNotification.cs create mode 100644 src/OpenNetty/OpenNettyNotifications.cs create mode 100644 src/OpenNetty/OpenNettyOptions.cs create mode 100644 src/OpenNetty/OpenNettyParameter.cs create mode 100644 src/OpenNetty/OpenNettyPipe.cs create mode 100644 src/OpenNetty/OpenNettyPipeline.cs create mode 100644 src/OpenNetty/OpenNettyProtocol.cs create mode 100644 src/OpenNetty/OpenNettyResources.resx create mode 100644 src/OpenNetty/OpenNettyScenario.cs create mode 100644 src/OpenNetty/OpenNettyService.cs create mode 100644 src/OpenNetty/OpenNettySession.cs create mode 100644 src/OpenNetty/OpenNettySessionType.cs create mode 100644 src/OpenNetty/OpenNettySetting.cs create mode 100644 src/OpenNetty/OpenNettySettings.cs create mode 100644 src/OpenNetty/OpenNettyTransaction.cs create mode 100644 src/OpenNetty/OpenNettyTransmissionOptions.cs create mode 100644 src/OpenNetty/OpenNettyUnit.cs create mode 100644 src/OpenNetty/OpenNettyUnitDefinition.cs create mode 100644 src/OpenNetty/OpenNettyWorker.cs diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..27ad135 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,169 @@ +root = true + +[*.cs] +end_of_line = crlf +indent_size = 4 +indent_style = space +insert_final_newline = false +tab_width = 4 + +csharp_indent_block_contents = true +csharp_indent_braces = false +csharp_indent_case_contents = true +csharp_indent_case_contents_when_block = true +csharp_indent_labels = one_less_than_current +csharp_indent_switch_labels = true +csharp_new_line_before_catch = true +csharp_new_line_before_else = true +csharp_new_line_before_finally = true +csharp_new_line_before_members_in_anonymous_types = true +csharp_new_line_before_members_in_object_initializers = true +csharp_new_line_before_open_brace = all +csharp_new_line_between_query_expression_clauses = true +csharp_prefer_braces = true:silent +csharp_prefer_simple_default_expression = true +csharp_prefer_simple_using_statement = true:suggestion +csharp_prefer_static_anonymous_function = true:suggestion +csharp_prefer_static_local_function = true:suggestion +csharp_preferred_modifier_order = public,private,protected,internal,static,extern,new,virtual,abstract,sealed,override,readonly,unsafe,volatile,async +csharp_preserve_single_line_blocks = true +csharp_preserve_single_line_statements = true +csharp_space_after_cast = true +csharp_space_after_colon_in_inheritance_clause = true +csharp_space_after_comma = true +csharp_space_after_dot = false +csharp_space_after_keywords_in_control_flow_statements = true +csharp_space_after_semicolon_in_for_statement = true +csharp_space_around_binary_operators = before_and_after +csharp_space_around_declaration_statements = false +csharp_space_before_colon_in_inheritance_clause = true +csharp_space_before_comma = false +csharp_space_before_dot = false +csharp_space_before_open_square_brackets = false +csharp_space_before_semicolon_in_for_statement = false +csharp_space_between_empty_square_brackets = false +csharp_space_between_method_call_empty_parameter_list_parentheses = false +csharp_space_between_method_call_name_and_opening_parenthesis = false +csharp_space_between_method_call_parameter_list_parentheses = false +csharp_space_between_method_declaration_empty_parameter_list_parentheses = false +csharp_space_between_method_declaration_name_and_open_parenthesis = false +csharp_space_between_method_declaration_parameter_list_parentheses = false +csharp_space_between_parentheses 
= false +csharp_space_between_square_brackets = false +csharp_style_allow_blank_line_after_colon_in_constructor_initializer_experimental = true +csharp_style_allow_blank_line_after_token_in_arrow_expression_clause_experimental = true +csharp_style_allow_blank_line_after_token_in_conditional_expression_experimental = true +csharp_style_allow_blank_lines_between_consecutive_braces_experimental = true +csharp_style_allow_embedded_statements_on_same_line_experimental = true +csharp_style_conditional_delegate_call = true +csharp_style_deconstructed_variable_declaration = true +csharp_style_expression_bodied_accessors = true:silent +csharp_style_expression_bodied_constructors = false:silent +csharp_style_expression_bodied_indexers = true:silent +csharp_style_expression_bodied_lambdas = true:silent +csharp_style_expression_bodied_local_functions = false:silent +csharp_style_expression_bodied_methods = false:silent +csharp_style_expression_bodied_operators = false:silent +csharp_style_expression_bodied_properties = true:silent +csharp_style_implicit_object_creation_when_type_is_apparent = true +csharp_style_inlined_variable_declaration = true +csharp_style_namespace_declarations = block_scoped:silent +csharp_style_pattern_matching_over_as_with_null_check = true:suggestion +csharp_style_pattern_matching_over_is_with_cast_check = true:suggestion +csharp_style_prefer_extended_property_pattern = true:suggestion +csharp_style_prefer_index_operator = true +csharp_style_prefer_local_over_anonymous_function = true +csharp_style_prefer_method_group_conversion = true:silent +csharp_style_prefer_not_pattern = true:suggestion +csharp_style_prefer_null_check_over_type_check = true +csharp_style_prefer_pattern_matching = true:silent +csharp_style_prefer_primary_constructors = false:suggestion +csharp_style_prefer_range_operator = true +csharp_style_prefer_readonly_struct = true:suggestion +csharp_style_prefer_readonly_struct_member = true:suggestion +csharp_style_prefer_switch_expression = true:suggestion +csharp_style_prefer_top_level_statements = true:silent +csharp_style_prefer_tuple_swap = true +csharp_style_prefer_utf8_string_literals = true +csharp_style_throw_expression = true +csharp_style_unused_value_assignment_preference = discard_variable +csharp_style_unused_value_expression_statement_preference = discard_variable +csharp_style_var_elsewhere = false +csharp_style_var_for_built_in_types = false +csharp_style_var_when_type_is_apparent = false +csharp_using_directive_placement = outside_namespace + +[*.{cs,vb}] +dotnet_code_quality_unused_parameters = all +dotnet_diagnostic.CA1510.severity = none +dotnet_diagnostic.CA2254.severity = none +dotnet_diagnostic.IDE0002.severity = none +dotnet_diagnostic.IDE0305.severity = none +dotnet_naming_rule.interface_should_be_begins_with_i.severity = suggestion +dotnet_naming_rule.interface_should_be_begins_with_i.style = begins_with_i +dotnet_naming_rule.interface_should_be_begins_with_i.symbols = interface +dotnet_naming_rule.non_field_members_should_be_pascal_case.severity = suggestion +dotnet_naming_rule.non_field_members_should_be_pascal_case.style = pascal_case +dotnet_naming_rule.non_field_members_should_be_pascal_case.symbols = non_field_members +dotnet_naming_rule.types_should_be_pascal_case.severity = suggestion +dotnet_naming_rule.types_should_be_pascal_case.style = pascal_case +dotnet_naming_rule.types_should_be_pascal_case.symbols = types +dotnet_naming_style.begins_with_i.capitalization = pascal_case +dotnet_naming_style.begins_with_i.required_prefix = 
I
+dotnet_naming_style.begins_with_i.required_suffix =
+dotnet_naming_style.begins_with_i.word_separator =
+dotnet_naming_style.pascal_case.capitalization = pascal_case
+dotnet_naming_style.pascal_case.required_prefix =
+dotnet_naming_style.pascal_case.required_suffix =
+dotnet_naming_style.pascal_case.word_separator =
+dotnet_naming_symbols.interface.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
+dotnet_naming_symbols.interface.applicable_kinds = interface
+dotnet_naming_symbols.interface.required_modifiers =
+dotnet_naming_symbols.non_field_members.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
+dotnet_naming_symbols.non_field_members.applicable_kinds = property, event, method
+dotnet_naming_symbols.non_field_members.required_modifiers =
+dotnet_naming_symbols.types.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected
+dotnet_naming_symbols.types.applicable_kinds = class, struct, interface, enum
+dotnet_naming_symbols.types.required_modifiers =
+dotnet_remove_unnecessary_suppression_exclusions = none
+dotnet_separate_import_directive_groups = false
+dotnet_sort_system_directives_first = true
+dotnet_style_allow_multiple_blank_lines_experimental = true
+dotnet_style_allow_statement_immediately_after_block_experimental = true
+dotnet_style_coalesce_expression = true
+dotnet_style_collection_initializer = true
+dotnet_style_explicit_tuple_names = true
+dotnet_style_namespace_match_folder = false
+dotnet_style_null_propagation = true
+dotnet_style_object_initializer = true
+dotnet_style_operator_placement_when_wrapping = beginning_of_line
+dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity
+dotnet_style_parentheses_in_other_binary_operators = always_for_clarity
+dotnet_style_parentheses_in_other_operators = never_if_unnecessary
+dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity
+dotnet_style_predefined_type_for_locals_parameters_members = true
+dotnet_style_predefined_type_for_member_access = true
+dotnet_style_prefer_auto_properties = true
+dotnet_style_prefer_collection_expression = when_types_loosely_match
+dotnet_style_prefer_compound_assignment = true
+dotnet_style_prefer_conditional_expression_over_assignment = true
+dotnet_style_prefer_conditional_expression_over_return = true
+dotnet_style_prefer_foreach_explicit_cast_in_source = when_strongly_typed
+dotnet_style_prefer_inferred_anonymous_type_member_names = true
+dotnet_style_prefer_inferred_tuple_names = true
+dotnet_style_prefer_is_null_check_over_reference_equality_method = true
+dotnet_style_prefer_simplified_boolean_expressions = true
+dotnet_style_prefer_simplified_interpolation = true
+dotnet_style_qualification_for_event = false
+dotnet_style_qualification_for_field = false
+dotnet_style_qualification_for_method = false
+dotnet_style_qualification_for_property = false
+dotnet_style_readonly_field = true
+dotnet_style_require_accessibility_modifiers = for_non_interface_members
+
+[*.{json,xml}]
+end_of_line = crlf
+indent_size = 2
+indent_style = space
+insert_final_newline = false
+tab_width = 2
diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
new file mode 100644
index 0000000..52f84fd
--- /dev/null
+++ b/.github/FUNDING.yml
@@ -0,0 +1,2 @@
+github: kevinchalet
+custom: https://paypal.me/kevinchalet
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 0000000..aa26361
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,135 @@
+name: build
+
+on:
+  push:
+    branches: [ dev, rel/* ]
+    tags: [ '*' ]
+  pull_request:
+    branches: [ dev, rel/* ]
+  workflow_dispatch:
+
+env:
+  DOTNET_MULTILEVEL_LOOKUP: 0
+  DOTNET_SKIP_FIRST_TIME_EXPERIENCE: 1
+  DOTNET_SYSTEM_CONSOLE_ALLOW_ANSI_COLOR_REDIRECTION: 1
+  NUGET_XMLDOC_MODE: skip
+  TERM: xterm
+
+permissions:
+  contents: read
+
+jobs:
+  build:
+    name: build-${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+
+    permissions:
+      attestations: write
+      contents: read
+      id-token: write
+
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ macos-latest, ubuntu-latest, windows-latest ]
+        include:
+          - os: macos-latest
+            os_name: macos
+          - os: ubuntu-latest
+            os_name: linux
+          - os: windows-latest
+            os_name: windows
+
+    steps:
+      - name: Select Xcode version
+        if: ${{ runner.os == 'macOS' }}
+        run: sudo xcode-select -s "/Applications/Xcode_15.4.app"
+
+      - name: Checkout code
+        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+
+      # Arcade only allows the revision to contain up to two characters, and GitHub Actions does not roll over
+      # build numbers every day like Azure DevOps does. To balance these two requirements, set the official
+      # build ID to be the same format as the built-in default from Arcade, except with the revision number
+      # being the number of the quarter hour of the current time of day (24 * 4 = 96, which is less than 100).
+      # So a build between 00:00 and 00:14 would have a revision of 1, and a build between 23:45 and 23:59:59
+      # would have a revision of 96.
+      - name: Set Build ID
+        if: ${{ startsWith(github.ref, 'refs/pull/') == false }}
+        shell: pwsh
+        run: |
+          $Now = (Get-Date).ToUniversalTime()
+          $Hours = $Now.Hour * 4
+          $QuarterHours = [Math]::Floor($Now.Minute / 15.0)
+          $Revision = $Hours + $QuarterHours + 1
+          $BuildId = $Now.ToString("yyyyMMdd") + "." 
+ $Revision + Write-Output "_ComputedOfficialBuildId=${BuildId}" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append + + - name: Build, test and pack + if: ${{ runner.os == 'Windows' }} + run: eng\common\CIBuild.cmd -configuration Release -prepareMachine -integrationTest /p:RestoreDotNetWorkloads=true + + - name: Build, test and pack + if: ${{ runner.os != 'Windows' }} + shell: pwsh + run: ./eng/common/cibuild.sh -configuration Release -prepareMachine -integrationTest /p:RestoreDotNetWorkloads=true + + - name: Attest artifacts + uses: actions/attest-build-provenance@bdd51370e0416ac948727f861e03c2f05d32d78e # v1.3.2 + if: | + runner.os == 'Windows' && + github.event.repository.fork == false && + startsWith(github.ref, 'refs/tags/') + with: + subject-path: | + ./artifacts/bin/**/Release/**/OpenNetty.*.dll + ./artifacts/packages/Release/Shipping/* + + - name: Publish logs + if: ${{ always() }} + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + with: + name: logs-${{ matrix.os_name }} + path: ./artifacts/log/Release + + - name: Publish NuGet packages + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + with: + name: packages-${{ matrix.os_name }} + path: ./artifacts/packages/Release/Shipping + + - name: Publish test results + if: ${{ always() }} + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + with: + name: testresults-${{ matrix.os_name }} + path: ./artifacts/TestResults/Release + + validate-packages: + needs: build + runs-on: ubuntu-latest + steps: + - name: Download packages + uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + with: + name: packages-windows + + - name: Setup .NET + uses: actions/setup-dotnet@4d6c8fcf3c8f7a60068d26b594648e99df24cee3 # v4.0.0 + + - name: Validate NuGet packages + shell: pwsh + run: | + dotnet tool install --global dotnet-validate --version 0.0.1-preview.304 + $packages = Get-ChildItem -Filter "*.nupkg" | ForEach-Object { $_.FullName } + $invalidPackages = 0 + foreach ($package in $packages) { + dotnet validate package local $package + if ($LASTEXITCODE -ne 0) { + $invalidPackages++ + } + } + if ($invalidPackages -gt 0) { + Write-Output "::error::$invalidPackages NuGet package(s) failed validation." + exit 1 + } diff --git a/Build.cmd b/Build.cmd new file mode 100644 index 0000000..4afad04 --- /dev/null +++ b/Build.cmd @@ -0,0 +1,3 @@ +@echo off +powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0eng\common\Build.ps1""" -restore -build %*" +exit /b %ErrorLevel% diff --git a/Directory.Build.props b/Directory.Build.props new file mode 100644 index 0000000..60315a8 --- /dev/null +++ b/Directory.Build.props @@ -0,0 +1,78 @@ + + + + + + preview + true + preview + true + $(NoWarn);CS1591;NETSDK1206;NU5118;NU5128;xUnit2002 + NU1901;NU1902;NU1903;NU1904 + direct + enable + enable + true + portable + snupkg + true + false + true + true + true + false + true + + + + OpenNetty + $(MSBuildThisFileDirectory)eng\key.snk + true + false + false + 0024000004800000940000000602000000240000525341310004000001000100ede88273f5b5fa40626c313bbc686c05a3a9d46fc032f8b9585e864c55279d894ff710ea74eb671196303047c59cb27981bcea2a295c55751a05f7f8b16b47a4fc4f1f9f4878ba76b6e1016759b884b486407945503c480a4093e7057b8f48c77057268968422f35cab9daaf3444cb0f4bb0f59554434c262c7d5081caa8a1c6 + a17b902d06b60845 + + + + Kévin Chalet + $(Authors) + OpenNetty + <_ProjectCopyright>© Kévin Chalet. All rights reserved. 
+ $(MSBuildThisFileDirectory)package-icon.png + https://github.com/opennetty/opennetty-core + Apache-2.0 + bticino;inone;iobl;legrand;myhome;nitoo;opennetty;openwebnet;scs;zigbee + git + https://github.com/opennetty/opennetty-core.git + + + + $([MSBuild]::MakeRelative($(RepoRoot), $(MSBuildProjectDirectory))) + false + false + + + + + true + $(_ComputedOfficialBuildId) + + + + true + true + true + true + false + + + + + + + + diff --git a/Directory.Build.targets b/Directory.Build.targets new file mode 100644 index 0000000..fbf36b0 --- /dev/null +++ b/Directory.Build.targets @@ -0,0 +1,34 @@ + + + + + + + + $(_ProjectCopyright) + false + false + + + + + + + + + diff --git a/Directory.Packages.props b/Directory.Packages.props new file mode 100644 index 0000000..93e529b --- /dev/null +++ b/Directory.Packages.props @@ -0,0 +1,47 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/NuGet.config b/NuGet.config new file mode 100644 index 0000000..998a172 --- /dev/null +++ b/NuGet.config @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/OpenNetty.sln b/OpenNetty.sln new file mode 100644 index 0000000..158d089 --- /dev/null +++ b/OpenNetty.sln @@ -0,0 +1,75 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.5.33530.505 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "OpenNetty", "src\OpenNetty\OpenNetty.csproj", "{E2B5ED97-FF3F-42AC-9B62-CD7FBD147C9A}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "OpenNetty.Mqtt", "src\OpenNetty.Mqtt\OpenNetty.Mqtt.csproj", "{5D575A92-DD1E-48CE-9946-0850F2D1A9E3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "OpenNetty.Daemon", "src\OpenNetty.Daemon\OpenNetty.Daemon.csproj", "{C4599CC7-E6D9-48E5-859C-DBED20DFAC32}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{02EA681E-C7D8-13C7-8484-4AC65E1B71E8}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "root", "root", "{0570A3E6-8025-4B6D-AE9F-527B5E3CCE89}" + ProjectSection(SolutionItems) = preProject + .editorconfig = .editorconfig + .gitattributes = .gitattributes + .gitignore = .gitignore + Build.cmd = Build.cmd + build.sh = build.sh + Directory.Build.props = Directory.Build.props + Directory.Build.targets = Directory.Build.targets + Directory.Packages.props = Directory.Packages.props + global.json = global.json + LICENSE.md = LICENSE.md + NuGet.config = NuGet.config + EndProjectSection +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "eng", "eng", "{B2C1F565-2CCD-47FD-941C-4F01871186EA}" + ProjectSection(SolutionItems) = preProject + eng\Build.props = eng\Build.props + eng\key.snk = eng\key.snk + eng\Signing.props = eng\Signing.props + eng\Tools.props = eng\Tools.props + eng\Version.Details.xml = eng\Version.Details.xml + eng\Versions.props = eng\Versions.props + eng\_._ = eng\_._ + EndProjectSection +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "workflows", "workflows", "{02EA681E-C7D8-13C7-8484-4AC65E1B71E8}" + ProjectSection(SolutionItems) = preProject + .github\workflows\build.yml = .github\workflows\build.yml + 
EndProjectSection
+EndProject
+Global
+    GlobalSection(SolutionConfigurationPlatforms) = preSolution
+        Debug|Any CPU = Debug|Any CPU
+        Release|Any CPU = Release|Any CPU
+    EndGlobalSection
+    GlobalSection(ProjectConfigurationPlatforms) = postSolution
+        {E2B5ED97-FF3F-42AC-9B62-CD7FBD147C9A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+        {E2B5ED97-FF3F-42AC-9B62-CD7FBD147C9A}.Debug|Any CPU.Build.0 = Debug|Any CPU
+        {E2B5ED97-FF3F-42AC-9B62-CD7FBD147C9A}.Release|Any CPU.ActiveCfg = Release|Any CPU
+        {E2B5ED97-FF3F-42AC-9B62-CD7FBD147C9A}.Release|Any CPU.Build.0 = Release|Any CPU
+        {5D575A92-DD1E-48CE-9946-0850F2D1A9E3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+        {5D575A92-DD1E-48CE-9946-0850F2D1A9E3}.Debug|Any CPU.Build.0 = Debug|Any CPU
+        {5D575A92-DD1E-48CE-9946-0850F2D1A9E3}.Release|Any CPU.ActiveCfg = Release|Any CPU
+        {5D575A92-DD1E-48CE-9946-0850F2D1A9E3}.Release|Any CPU.Build.0 = Release|Any CPU
+        {C4599CC7-E6D9-48E5-859C-DBED20DFAC32}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+        {C4599CC7-E6D9-48E5-859C-DBED20DFAC32}.Debug|Any CPU.Build.0 = Debug|Any CPU
+        {C4599CC7-E6D9-48E5-859C-DBED20DFAC32}.Release|Any CPU.ActiveCfg = Release|Any CPU
+        {C4599CC7-E6D9-48E5-859C-DBED20DFAC32}.Release|Any CPU.Build.0 = Release|Any CPU
+    EndGlobalSection
+    GlobalSection(SolutionProperties) = preSolution
+        HideSolutionNode = FALSE
+    EndGlobalSection
+    GlobalSection(NestedProjects) = preSolution
+        {E2B5ED97-FF3F-42AC-9B62-CD7FBD147C9A} = {02EA681E-C7D8-13C7-8484-4AC65E1B71E8}
+        {5D575A92-DD1E-48CE-9946-0850F2D1A9E3} = {02EA681E-C7D8-13C7-8484-4AC65E1B71E8}
+        {C4599CC7-E6D9-48E5-859C-DBED20DFAC32} = {02EA681E-C7D8-13C7-8484-4AC65E1B71E8}
+    EndGlobalSection
+    GlobalSection(ExtensibilityGlobals) = postSolution
+        SolutionGuid = {3B0D7022-4BD5-4969-A20C-1EB8686B74EB}
+    EndGlobalSection
+EndGlobal
diff --git a/build.sh b/build.sh
new file mode 100644
index 0000000..8477d5a
--- /dev/null
+++ b/build.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $SOURCE until the file is no longer a symlink
+while [[ -h $source ]]; do
+  scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+  source="$(readlink "$source")"
+
+  # if $source was a relative symlink, we need to resolve it relative to the path where the
+  # symlink file was located
+  [[ $source != /* ]] && source="$scriptroot/$source"
+done
+
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+"$scriptroot/eng/common/build.sh" --build --restore $@
diff --git a/eng/Build.props b/eng/Build.props
new file mode 100644
index 0000000..38cc0e8
--- /dev/null
+++ b/eng/Build.props
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
diff --git a/eng/Signing.props b/eng/Signing.props
new file mode 100644
index 0000000..4ab0a58
--- /dev/null
+++ b/eng/Signing.props
@@ -0,0 +1,13 @@
+
+
+
+  true
+
+
+
+
+
+
+
+
+
diff --git a/eng/Tools.props b/eng/Tools.props
new file mode 100644
index 0000000..e8bca61
--- /dev/null
+++ b/eng/Tools.props
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
new file mode 100644
index 0000000..897829f
--- /dev/null
+++ b/eng/Version.Details.xml
@@ -0,0 +1,19 @@
+
+
+
+
+
+
+
+  https://github.com/dotnet/arcade
+  39042b4048580366d35a7c1c4f4ce8fc0dbea4b4
+
+
+
+  https://github.com/dotnet/arcade
+  39042b4048580366d35a7c1c4f4ce8fc0dbea4b4
+
+
+
+
+
diff --git a/eng/Versions.props b/eng/Versions.props
new file mode 100644
index 0000000..5868e6e
--- /dev/null
+++ b/eng/Versions.props
@@ -0,0 +1,17 @@
+
+
+
+  1
+  0
+  0
+  $(MajorVersion).$(MinorVersion).$(PatchVersion)
+  alpha
+  1
+  Alpha 
$(PreReleaseVersionIteration) + false + release + true + false + + + diff --git a/eng/_._ b/eng/_._ new file mode 100644 index 0000000..5f28270 --- /dev/null +++ b/eng/_._ @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/eng/common/BuildConfiguration/build-configuration.json b/eng/common/BuildConfiguration/build-configuration.json new file mode 100644 index 0000000..3d1cc89 --- /dev/null +++ b/eng/common/BuildConfiguration/build-configuration.json @@ -0,0 +1,4 @@ +{ + "RetryCountLimit": 1, + "RetryByAnyError": false +} diff --git a/eng/common/CIBuild.cmd b/eng/common/CIBuild.cmd new file mode 100644 index 0000000..56c2f25 --- /dev/null +++ b/eng/common/CIBuild.cmd @@ -0,0 +1,2 @@ +@echo off +powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*" \ No newline at end of file diff --git a/eng/common/PSScriptAnalyzerSettings.psd1 b/eng/common/PSScriptAnalyzerSettings.psd1 new file mode 100644 index 0000000..4c1ea7c --- /dev/null +++ b/eng/common/PSScriptAnalyzerSettings.psd1 @@ -0,0 +1,11 @@ +@{ + IncludeRules=@('PSAvoidUsingCmdletAliases', + 'PSAvoidUsingWMICmdlet', + 'PSAvoidUsingPositionalParameters', + 'PSAvoidUsingInvokeExpression', + 'PSUseDeclaredVarsMoreThanAssignments', + 'PSUseCmdletCorrectly', + 'PSStandardDSCFunctionsInResource', + 'PSUseIdenticalMandatoryParametersForDSC', + 'PSUseIdenticalParametersForDSC') +} \ No newline at end of file diff --git a/eng/common/README.md b/eng/common/README.md new file mode 100644 index 0000000..ff49c37 --- /dev/null +++ b/eng/common/README.md @@ -0,0 +1,28 @@ +# Don't touch this folder + + uuuuuuuuuuuuuuuuuuuu + u" uuuuuuuuuuuuuuuuuu "u + u" u$$$$$$$$$$$$$$$$$$$$u "u + u" u$$$$$$$$$$$$$$$$$$$$$$$$u "u + u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u + u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u + u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u + $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $ + $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $ + $ $$$" ... "$... ...$" ... "$$$ ... "$$$ $ + $ $$$u `"$$$$$$$ $$$ $$$$$ $$ $$$ $$$ $ + $ $$$$$$uu "$$$$ $$$ $$$$$ $$ """ u$$$ $ + $ $$$""$$$ $$$$ $$$u "$$$" u$$ $$$$$$$$ $ + $ $$$$....,$$$$$..$$$$$....,$$$$..$$$$$$$$ $ + $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $ + "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u" + "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u" + "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u" + "u "$$$$$$$$$$$$$$$$$$$$$$$$" u" + "u "$$$$$$$$$$$$$$$$$$$$" u" + "u """""""""""""""""" u" + """""""""""""""""""" + +!!! Changes made in this directory are subject to being overwritten by automation !!! + +The files in this directory are shared by all Arcade repos and managed by automation. If you need to make changes to these files, open an issue or submit a pull request to https://github.com/dotnet/arcade first. diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1 new file mode 100644 index 0000000..6c65e81 --- /dev/null +++ b/eng/common/SetupNugetSources.ps1 @@ -0,0 +1,167 @@ +# This file is a temporary workaround for internal builds to be able to restore from private AzDO feeds. +# This file should be removed as part of this issue: https://github.com/dotnet/arcade/issues/4080 +# +# What the script does is iterate over all package sources in the pointed NuGet.config and add a credential entry +# under for each Maestro managed private feed. Two additional credential +# entries are also added for the two private static internal feeds: dotnet3-internal and dotnet3-internal-transport. 
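+# Each credential is written under the packageSourceCredentials node as an element named after the
+# feed, holding one "add" entry for Username and one for ClearTextPassword (see AddCredential below).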
+# +# This script needs to be called in every job that will restore packages and which the base repo has +# private AzDO feeds in the NuGet.config. +# +# See example YAML call for this script below. Note the use of the variable `$(dn-bot-dnceng-artifact-feeds-rw)` +# from the AzureDevOps-Artifact-Feeds-Pats variable group. +# +# Any disabledPackageSources entries which start with "darc-int" will be re-enabled as part of this script executing +# +# - task: PowerShell@2 +# displayName: Setup Private Feeds Credentials +# condition: eq(variables['Agent.OS'], 'Windows_NT') +# inputs: +# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 +# arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token +# env: +# Token: $(dn-bot-dnceng-artifact-feeds-rw) + +[CmdletBinding()] +param ( + [Parameter(Mandatory = $true)][string]$ConfigFile, + [Parameter(Mandatory = $true)][string]$Password +) + +$ErrorActionPreference = "Stop" +Set-StrictMode -Version 2.0 +[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 + +. $PSScriptRoot\tools.ps1 + +# Add source entry to PackageSources +function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $Password) { + $packageSource = $sources.SelectSingleNode("add[@key='$SourceName']") + + if ($packageSource -eq $null) + { + $packageSource = $doc.CreateElement("add") + $packageSource.SetAttribute("key", $SourceName) + $packageSource.SetAttribute("value", $SourceEndPoint) + $sources.AppendChild($packageSource) | Out-Null + } + else { + Write-Host "Package source $SourceName already present." + } + + AddCredential -Creds $creds -Source $SourceName -Username $Username -Password $Password +} + +# Add a credential node for the specified source +function AddCredential($creds, $source, $username, $password) { + # Looks for credential configuration for the given SourceName. Create it if none is found. + $sourceElement = $creds.SelectSingleNode($Source) + if ($sourceElement -eq $null) + { + $sourceElement = $doc.CreateElement($Source) + $creds.AppendChild($sourceElement) | Out-Null + } + + # Add the node to the credential if none is found. + $usernameElement = $sourceElement.SelectSingleNode("add[@key='Username']") + if ($usernameElement -eq $null) + { + $usernameElement = $doc.CreateElement("add") + $usernameElement.SetAttribute("key", "Username") + $sourceElement.AppendChild($usernameElement) | Out-Null + } + $usernameElement.SetAttribute("value", $Username) + + # Add the to the credential if none is found. + # Add it as a clear text because there is no support for encrypted ones in non-windows .Net SDKs. + # -> https://github.com/NuGet/Home/issues/5526 + $passwordElement = $sourceElement.SelectSingleNode("add[@key='ClearTextPassword']") + if ($passwordElement -eq $null) + { + $passwordElement = $doc.CreateElement("add") + $passwordElement.SetAttribute("key", "ClearTextPassword") + $sourceElement.AppendChild($passwordElement) | Out-Null + } + $passwordElement.SetAttribute("value", $Password) +} + +function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $Password) { + $maestroPrivateSources = $Sources.SelectNodes("add[contains(@key,'darc-int')]") + + Write-Host "Inserting credentials for $($maestroPrivateSources.Count) Maestro's private feeds." 
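+    # Each darc-int* source found above gets its own credential node via AddCredential.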
+ + ForEach ($PackageSource in $maestroPrivateSources) { + Write-Host "`tInserting credential for Maestro's feed:" $PackageSource.Key + AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -Password $Password + } +} + +function EnablePrivatePackageSources($DisabledPackageSources) { + $maestroPrivateSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]") + ForEach ($DisabledPackageSource in $maestroPrivateSources) { + Write-Host "`tEnsuring private source '$($DisabledPackageSource.key)' is enabled by deleting it from disabledPackageSource" + # Due to https://github.com/NuGet/Home/issues/10291, we must actually remove the disabled entries + $DisabledPackageSources.RemoveChild($DisabledPackageSource) + } +} + +if (!(Test-Path $ConfigFile -PathType Leaf)) { + Write-PipelineTelemetryError -Category 'Build' -Message "Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. Couldn't find the NuGet config file: $ConfigFile" + ExitWithExitCode 1 +} + +if (!$Password) { + Write-PipelineTelemetryError -Category 'Build' -Message 'Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. Please supply a valid PAT' + ExitWithExitCode 1 +} + +# Load NuGet.config +$doc = New-Object System.Xml.XmlDocument +$filename = (Get-Item $ConfigFile).FullName +$doc.Load($filename) + +# Get reference to or create one if none exist already +$sources = $doc.DocumentElement.SelectSingleNode("packageSources") +if ($sources -eq $null) { + $sources = $doc.CreateElement("packageSources") + $doc.DocumentElement.AppendChild($sources) | Out-Null +} + +# Looks for a node. Create it if none is found. +$creds = $doc.DocumentElement.SelectSingleNode("packageSourceCredentials") +if ($creds -eq $null) { + $creds = $doc.CreateElement("packageSourceCredentials") + $doc.DocumentElement.AppendChild($creds) | Out-Null +} + +# Check for disabledPackageSources; we'll enable any darc-int ones we find there +$disabledSources = $doc.DocumentElement.SelectSingleNode("disabledPackageSources") +if ($disabledSources -ne $null) { + Write-Host "Checking for any darc-int disabled package sources in the disabledPackageSources node" + EnablePrivatePackageSources -DisabledPackageSources $disabledSources +} + +$userName = "dn-bot" + +# Insert credential nodes for Maestro's private feeds +InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -Password $Password + +# 3.1 uses a different feed url format so it's handled differently here +$dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']") +if ($dotnet31Source -ne $null) { + AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password + AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password +} + +$dotnetVersions = @('5','6','7','8') + +foreach ($dotnetVersion in $dotnetVersions) { + $feedPrefix = "dotnet" + $dotnetVersion; + $dotnetSource = $sources.SelectSingleNode("add[@key='$feedPrefix']") + if ($dotnetSource -ne $null) { + AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password + AddPackageSource -Sources 
$sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password + } +} + +$doc.Save($filename) diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh new file mode 100644 index 0000000..d387c7e --- /dev/null +++ b/eng/common/SetupNugetSources.sh @@ -0,0 +1,171 @@ +#!/usr/bin/env bash + +# This file is a temporary workaround for internal builds to be able to restore from private AzDO feeds. +# This file should be removed as part of this issue: https://github.com/dotnet/arcade/issues/4080 +# +# What the script does is iterate over all package sources in the pointed NuGet.config and add a credential entry +# under for each Maestro's managed private feed. Two additional credential +# entries are also added for the two private static internal feeds: dotnet3-internal and dotnet3-internal-transport. +# +# This script needs to be called in every job that will restore packages and which the base repo has +# private AzDO feeds in the NuGet.config. +# +# See example YAML call for this script below. Note the use of the variable `$(dn-bot-dnceng-artifact-feeds-rw)` +# from the AzureDevOps-Artifact-Feeds-Pats variable group. +# +# Any disabledPackageSources entries which start with "darc-int" will be re-enabled as part of this script executing. +# +# - task: Bash@3 +# displayName: Setup Private Feeds Credentials +# inputs: +# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh +# arguments: $(Build.SourcesDirectory)/NuGet.config $Token +# condition: ne(variables['Agent.OS'], 'Windows_NT') +# env: +# Token: $(dn-bot-dnceng-artifact-feeds-rw) + +ConfigFile=$1 +CredToken=$2 +NL='\n' +TB=' ' + +source="${BASH_SOURCE[0]}" + +# resolve $source until the file is no longer a symlink +while [[ -h "$source" ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +. "$scriptroot/tools.sh" + +if [ ! -f "$ConfigFile" ]; then + Write-PipelineTelemetryError -Category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. Couldn't find the NuGet config file: $ConfigFile" + ExitWithExitCode 1 +fi + +if [ -z "$CredToken" ]; then + Write-PipelineTelemetryError -category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. Please supply a valid PAT" + ExitWithExitCode 1 +fi + +if [[ `uname -s` == "Darwin" ]]; then + NL=$'\\\n' + TB='' +fi + +# Ensure there is a ... section. +grep -i "" $ConfigFile +if [ "$?" != "0" ]; then + echo "Adding ... section." + ConfigNodeHeader="" + PackageSourcesTemplate="${TB}${NL}${TB}" + + sed -i.bak "s|$ConfigNodeHeader|$ConfigNodeHeader${NL}$PackageSourcesTemplate|" $ConfigFile +fi + +# Ensure there is a ... section. +grep -i "" $ConfigFile +if [ "$?" != "0" ]; then + echo "Adding ... section." 
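+  # Splice an empty packageSourceCredentials section into the config immediately after the packageSources section.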
+ + PackageSourcesNodeFooter="" + PackageSourceCredentialsTemplate="${TB}${NL}${TB}" + + sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourcesNodeFooter${NL}$PackageSourceCredentialsTemplate|" $ConfigFile +fi + +PackageSources=() + +# Ensure dotnet3.1-internal and dotnet3.1-internal-transport are in the packageSources if the public dotnet3.1 feeds are present +grep -i "" + + sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile + fi + PackageSources+=('dotnet3.1-internal') + + grep -i "" $ConfigFile + if [ "$?" != "0" ]; then + echo "Adding dotnet3.1-internal-transport to the packageSources." + PackageSourcesNodeFooter="" + PackageSourceTemplate="${TB}" + + sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile + fi + PackageSources+=('dotnet3.1-internal-transport') +fi + +DotNetVersions=('5' '6' '7' '8') + +for DotNetVersion in ${DotNetVersions[@]} ; do + FeedPrefix="dotnet${DotNetVersion}"; + grep -i "" + + sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile + fi + PackageSources+=("$FeedPrefix-internal") + + grep -i "" $ConfigFile + if [ "$?" != "0" ]; then + echo "Adding $FeedPrefix-internal-transport to the packageSources." + PackageSourcesNodeFooter="" + PackageSourceTemplate="${TB}" + + sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile + fi + PackageSources+=("$FeedPrefix-internal-transport") + fi +done + +# I want things split line by line +PrevIFS=$IFS +IFS=$'\n' +PackageSources+="$IFS" +PackageSources+=$(grep -oh '"darc-int-[^"]*"' $ConfigFile | tr -d '"') +IFS=$PrevIFS + +for FeedName in ${PackageSources[@]} ; do + # Check if there is no existing credential for this FeedName + grep -i "<$FeedName>" $ConfigFile + if [ "$?" != "0" ]; then + echo "Adding credentials for $FeedName." + + PackageSourceCredentialsNodeFooter="" + NewCredential="${TB}${TB}<$FeedName>${NL}${NL}${NL}" + + sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile + fi +done + +# Re-enable any entries in disabledPackageSources where the feed name contains darc-int +grep -i "" $ConfigFile +if [ "$?" 
== "0" ]; then + DisabledDarcIntSources=() + echo "Re-enabling any disabled \"darc-int\" package sources in $ConfigFile" + DisabledDarcIntSources+=$(grep -oh '"darc-int-[^"]*" value="true"' $ConfigFile | tr -d '"') + for DisabledSourceName in ${DisabledDarcIntSources[@]} ; do + if [[ $DisabledSourceName == darc-int* ]] + then + OldDisableValue="" + NewDisableValue="" + sed -i.bak "s|$OldDisableValue|$NewDisableValue|" $ConfigFile + echo "Neutralized disablePackageSources entry for '$DisabledSourceName'" + fi + done +fi diff --git a/eng/common/build.ps1 b/eng/common/build.ps1 new file mode 100644 index 0000000..33a6f2d --- /dev/null +++ b/eng/common/build.ps1 @@ -0,0 +1,166 @@ +[CmdletBinding(PositionalBinding=$false)] +Param( + [string][Alias('c')]$configuration = "Debug", + [string]$platform = $null, + [string] $projects, + [string][Alias('v')]$verbosity = "minimal", + [string] $msbuildEngine = $null, + [bool] $warnAsError = $true, + [bool] $nodeReuse = $true, + [switch][Alias('r')]$restore, + [switch] $deployDeps, + [switch][Alias('b')]$build, + [switch] $rebuild, + [switch] $deploy, + [switch][Alias('t')]$test, + [switch] $integrationTest, + [switch] $performanceTest, + [switch] $sign, + [switch] $pack, + [switch] $publish, + [switch] $clean, + [switch][Alias('bl')]$binaryLog, + [switch][Alias('nobl')]$excludeCIBinarylog, + [switch] $ci, + [switch] $prepareMachine, + [string] $runtimeSourceFeed = '', + [string] $runtimeSourceFeedKey = '', + [switch] $excludePrereleaseVS, + [switch] $nativeToolsOnMachine, + [switch] $help, + [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties +) + +# Unset 'Platform' environment variable to avoid unwanted collision in InstallDotNetCore.targets file +# some computer has this env var defined (e.g. Some HP) +if($env:Platform) { + $env:Platform="" +} +function Print-Usage() { + Write-Host "Common settings:" + Write-Host " -configuration Build configuration: 'Debug' or 'Release' (short: -c)" + Write-Host " -platform Platform configuration: 'x86', 'x64' or any valid Platform value to pass to msbuild" + Write-Host " -verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)" + Write-Host " -binaryLog Output binary log (short: -bl)" + Write-Host " -help Print help and exit" + Write-Host "" + + Write-Host "Actions:" + Write-Host " -restore Restore dependencies (short: -r)" + Write-Host " -build Build solution (short: -b)" + Write-Host " -rebuild Rebuild solution" + Write-Host " -deploy Deploy built VSIXes" + Write-Host " -deployDeps Deploy dependencies (e.g. VSIXes for integration tests)" + Write-Host " -test Run all unit tests in the solution (short: -t)" + Write-Host " -integrationTest Run all integration tests in the solution" + Write-Host " -performanceTest Run all performance tests in the solution" + Write-Host " -pack Package build outputs into NuGet packages and Willow components" + Write-Host " -sign Sign build outputs" + Write-Host " -publish Publish artifacts (e.g. symbols)" + Write-Host " -clean Clean the solution" + Write-Host "" + + Write-Host "Advanced settings:" + Write-Host " -projects Semi-colon delimited list of sln/proj's to build. 
Globbing is supported (*.sln)" + Write-Host " -ci Set when running on CI server" + Write-Host " -excludeCIBinarylog Don't output binary log (short: -nobl)" + Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build" + Write-Host " -warnAsError Sets warnaserror msbuild parameter ('true' or 'false')" + Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)." + Write-Host " -excludePrereleaseVS Set to exclude build engines in prerelease versions of Visual Studio" + Write-Host " -nativeToolsOnMachine Sets the native tools on machine environment variable (indicating that the script should use native tools on machine)" + Write-Host "" + + Write-Host "Command line arguments not listed above are passed thru to msbuild." + Write-Host "The above arguments can be shortened as much as to be unambiguous (e.g. -co for configuration, -t for test, etc.)." +} + +. $PSScriptRoot\tools.ps1 + +function InitializeCustomToolset { + if (-not $restore) { + return + } + + $script = Join-Path $EngRoot 'restore-toolset.ps1' + + if (Test-Path $script) { + . $script + } +} + +function Build { + $toolsetBuildProj = InitializeToolset + InitializeCustomToolset + + $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'Build.binlog') } else { '' } + $platformArg = if ($platform) { "/p:Platform=$platform" } else { '' } + + if ($projects) { + # Re-assign properties to a new variable because PowerShell doesn't let us append properties directly for unclear reasons. + # Explicitly set the type as string[] because otherwise PowerShell would make this char[] if $properties is empty. + [string[]] $msbuildArgs = $properties + + # Resolve relative project paths into full paths + $projects = ($projects.Split(';').ForEach({Resolve-Path $_}) -join ';') + + $msbuildArgs += "/p:Projects=$projects" + $properties = $msbuildArgs + } + + MSBuild $toolsetBuildProj ` + $bl ` + $platformArg ` + /p:Configuration=$configuration ` + /p:RepoRoot=$RepoRoot ` + /p:Restore=$restore ` + /p:DeployDeps=$deployDeps ` + /p:Build=$build ` + /p:Rebuild=$rebuild ` + /p:Deploy=$deploy ` + /p:Test=$test ` + /p:Pack=$pack ` + /p:IntegrationTest=$integrationTest ` + /p:PerformanceTest=$performanceTest ` + /p:Sign=$sign ` + /p:Publish=$publish ` + @properties +} + +try { + if ($clean) { + if (Test-Path $ArtifactsDir) { + Remove-Item -Recurse -Force $ArtifactsDir + Write-Host 'Artifacts directory deleted.' + } + exit 0 + } + + if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) { + Print-Usage + exit 0 + } + + if ($ci) { + if (-not $excludeCIBinarylog) { + $binaryLog = $true + } + $nodeReuse = $false + } + + if ($nativeToolsOnMachine) { + $env:NativeToolsOnMachine = $true + } + if ($restore) { + InitializeNativeTools + } + + Build +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_ + ExitWithExitCode 1 +} + +ExitWithExitCode 0 diff --git a/eng/common/build.sh b/eng/common/build.sh new file mode 100644 index 0000000..50af40c --- /dev/null +++ b/eng/common/build.sh @@ -0,0 +1,247 @@ +#!/usr/bin/env bash + +# Stop script if unbound variable found (use ${var:-} if intentional) +set -u + +# Stop script if command returns non-zero exit code. +# Prevents hidden errors caused by missing error code propagation. 
+set -e + +usage() +{ + echo "Common settings:" + echo " --configuration Build configuration: 'Debug' or 'Release' (short: -c)" + echo " --verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)" + echo " --binaryLog Create MSBuild binary log (short: -bl)" + echo " --help Print help and exit (short: -h)" + echo "" + + echo "Actions:" + echo " --restore Restore dependencies (short: -r)" + echo " --build Build solution (short: -b)" + echo " --sourceBuild Source-build the solution (short: -sb)" + echo " Will additionally trigger the following actions: --restore, --build, --pack" + echo " If --configuration is not set explicitly, will also set it to 'Release'" + echo " --rebuild Rebuild solution" + echo " --test Run all unit tests in the solution (short: -t)" + echo " --integrationTest Run all integration tests in the solution" + echo " --performanceTest Run all performance tests in the solution" + echo " --pack Package build outputs into NuGet packages and Willow components" + echo " --sign Sign build outputs" + echo " --publish Publish artifacts (e.g. symbols)" + echo " --clean Clean the solution" + echo "" + + echo "Advanced settings:" + echo " --projects Project or solution file(s) to build" + echo " --ci Set when running on CI server" + echo " --excludeCIBinarylog Don't output binary log (short: -nobl)" + echo " --prepareMachine Prepare machine for CI run, clean up processes after build" + echo " --nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')" + echo " --warnAsError Sets warnaserror msbuild parameter ('true' or 'false')" + echo "" + echo "Command line arguments not listed above are passed thru to msbuild." + echo "Arguments can also be passed in with a single hyphen." +} + +source="${BASH_SOURCE[0]}" + +# resolve $source until the file is no longer a symlink +while [[ -h "$source" ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +restore=false +build=false +source_build=false +rebuild=false +test=false +integration_test=false +performance_test=false +pack=false +publish=false +sign=false +public=false +ci=false +clean=false + +warn_as_error=true +node_reuse=true +binary_log=false +exclude_ci_binary_log=false +pipelines_log=false + +projects='' +configuration='' +prepare_machine=false +verbosity='minimal' +runtime_source_feed='' +runtime_source_feed_key='' + +properties='' +while [[ $# > 0 ]]; do + opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" + case "$opt" in + -help|-h) + usage + exit 0 + ;; + -clean) + clean=true + ;; + -configuration|-c) + configuration=$2 + shift + ;; + -verbosity|-v) + verbosity=$2 + shift + ;; + -binarylog|-bl) + binary_log=true + ;; + -excludeCIBinarylog|-nobl) + exclude_ci_binary_log=true + ;; + -pipelineslog|-pl) + pipelines_log=true + ;; + -restore|-r) + restore=true + ;; + -build|-b) + build=true + ;; + -rebuild) + rebuild=true + ;; + -pack) + pack=true + ;; + -sourcebuild|-sb) + build=true + source_build=true + restore=true + pack=true + ;; + -test|-t) + test=true + ;; + -integrationtest) + integration_test=true + ;; + -performancetest) + performance_test=true + ;; + -sign) + sign=true + ;; + -publish) + publish=true + ;; + -preparemachine) + prepare_machine=true + ;; + -projects) + projects=$2 + shift + 
;; + -ci) + ci=true + ;; + -warnaserror) + warn_as_error=$2 + shift + ;; + -nodereuse) + node_reuse=$2 + shift + ;; + -runtimesourcefeed) + runtime_source_feed=$2 + shift + ;; + -runtimesourcefeedkey) + runtime_source_feed_key=$2 + shift + ;; + *) + properties="$properties $1" + ;; + esac + + shift +done + +if [[ -z "$configuration" ]]; then + if [[ "$source_build" = true ]]; then configuration="Release"; else configuration="Debug"; fi +fi + +if [[ "$ci" == true ]]; then + pipelines_log=true + node_reuse=false + if [[ "$exclude_ci_binary_log" == false ]]; then + binary_log=true + fi +fi + +. "$scriptroot/tools.sh" + +function InitializeCustomToolset { + local script="$eng_root/restore-toolset.sh" + + if [[ -a "$script" ]]; then + . "$script" + fi +} + +function Build { + InitializeToolset + InitializeCustomToolset + + if [[ ! -z "$projects" ]]; then + properties="$properties /p:Projects=$projects" + fi + + local bl="" + if [[ "$binary_log" == true ]]; then + bl="/bl:\"$log_dir/Build.binlog\"" + fi + + MSBuild $_InitializeToolset \ + $bl \ + /p:Configuration=$configuration \ + /p:RepoRoot="$repo_root" \ + /p:Restore=$restore \ + /p:Build=$build \ + /p:ArcadeBuildFromSource=$source_build \ + /p:Rebuild=$rebuild \ + /p:Test=$test \ + /p:Pack=$pack \ + /p:IntegrationTest=$integration_test \ + /p:PerformanceTest=$performance_test \ + /p:Sign=$sign \ + /p:Publish=$publish \ + $properties + + ExitWithExitCode 0 +} + +if [[ "$clean" == true ]]; then + if [ -d "$artifacts_dir" ]; then + rm -rf $artifacts_dir + echo "Artifacts directory deleted." + fi + exit 0 +fi + +if [[ "$restore" == true ]]; then + InitializeNativeTools +fi + +Build diff --git a/eng/common/cibuild.sh b/eng/common/cibuild.sh new file mode 100644 index 0000000..1a02c0d --- /dev/null +++ b/eng/common/cibuild.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" + +# resolve $SOURCE until the file is no longer a symlink +while [[ -h $source ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + + # if $source was a relative symlink, we need to resolve it relative to the path where + # the symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +. 
"$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@ \ No newline at end of file diff --git a/eng/common/cross/arm/sources.list.bionic b/eng/common/cross/arm/sources.list.bionic new file mode 100644 index 0000000..2109557 --- /dev/null +++ b/eng/common/cross/arm/sources.list.bionic @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse diff --git a/eng/common/cross/arm/sources.list.focal b/eng/common/cross/arm/sources.list.focal new file mode 100644 index 0000000..4de2600 --- /dev/null +++ b/eng/common/cross/arm/sources.list.focal @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse diff --git a/eng/common/cross/arm/sources.list.jammy b/eng/common/cross/arm/sources.list.jammy new file mode 100644 index 0000000..6bb0453 --- /dev/null +++ b/eng/common/cross/arm/sources.list.jammy @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse diff --git a/eng/common/cross/arm/sources.list.jessie b/eng/common/cross/arm/sources.list.jessie new file mode 100644 index 0000000..4d142ac --- /dev/null +++ b/eng/common/cross/arm/sources.list.jessie @@ -0,0 +1,3 @@ +# Debian (sid) # UNSTABLE +deb http://ftp.debian.org/debian/ sid main contrib non-free +deb-src http://ftp.debian.org/debian/ sid main contrib non-free diff --git a/eng/common/cross/arm/sources.list.trusty b/eng/common/cross/arm/sources.list.trusty new file mode 100644 index 0000000..07d8f88 --- /dev/null +++ b/eng/common/cross/arm/sources.list.trusty @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe + +deb 
http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse \ No newline at end of file diff --git a/eng/common/cross/arm/sources.list.xenial b/eng/common/cross/arm/sources.list.xenial new file mode 100644 index 0000000..56fbb36 --- /dev/null +++ b/eng/common/cross/arm/sources.list.xenial @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse diff --git a/eng/common/cross/arm/sources.list.zesty b/eng/common/cross/arm/sources.list.zesty new file mode 100644 index 0000000..ea2c14a --- /dev/null +++ b/eng/common/cross/arm/sources.list.zesty @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse diff --git a/eng/common/cross/arm/tizen-build-rootfs.sh b/eng/common/cross/arm/tizen-build-rootfs.sh new file mode 100644 index 0000000..9fdb32e --- /dev/null +++ b/eng/common/cross/arm/tizen-build-rootfs.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash +set -e + +__ARM_HARDFP_CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) +__TIZEN_CROSSDIR="$__ARM_HARDFP_CrossDir/tizen" + +if [[ -z "$ROOTFS_DIR" ]]; then + echo "ROOTFS_DIR is not defined." 
+ exit 1; +fi + +TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp +mkdir -p $TIZEN_TMP_DIR + +# Download files +echo ">>Start downloading files" +VERBOSE=1 $__ARM_HARDFP_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR +echo "<>Start constructing Tizen rootfs" +TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm` +cd $ROOTFS_DIR +for f in $TIZEN_RPM_FILES; do + rpm2cpio $f | cpio -idm --quiet +done +echo "<>Start configuring Tizen rootfs" +ln -sfn asm-arm ./usr/include/asm +patch -p1 < $__TIZEN_CROSSDIR/tizen.patch +echo "</dev/null; then + VERBOSE=0 +fi + +Log() +{ + if [ $VERBOSE -ge $1 ]; then + echo ${@:2} + fi +} + +Inform() +{ + Log 1 -e "\x1B[0;34m$@\x1B[m" +} + +Debug() +{ + Log 2 -e "\x1B[0;32m$@\x1B[m" +} + +Error() +{ + >&2 Log 0 -e "\x1B[0;31m$@\x1B[m" +} + +Fetch() +{ + URL=$1 + FILE=$2 + PROGRESS=$3 + if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then + CURL_OPT="--progress-bar" + else + CURL_OPT="--silent" + fi + curl $CURL_OPT $URL > $FILE +} + +hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; } +hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; } +hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; } + +TMPDIR=$1 +if [ ! -d $TMPDIR ]; then + TMPDIR=./tizen_tmp + Debug "Create temporary directory : $TMPDIR" + mkdir -p $TMPDIR +fi + +TIZEN_URL=http://download.tizen.org/snapshots/tizen +BUILD_XML=build.xml +REPOMD_XML=repomd.xml +PRIMARY_XML=primary.xml +TARGET_URL="http://__not_initialized" + +Xpath_get() +{ + XPATH_RESULT='' + XPATH=$1 + XML_FILE=$2 + RESULT=$(xmllint --xpath $XPATH $XML_FILE) + if [[ -z ${RESULT// } ]]; then + Error "Can not find target from $XML_FILE" + Debug "Xpath = $XPATH" + exit 1 + fi + XPATH_RESULT=$RESULT +} + +fetch_tizen_pkgs_init() +{ + TARGET=$1 + PROFILE=$2 + Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE" + + TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs + if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi + mkdir -p $TMP_PKG_DIR + + PKG_URL=$TIZEN_URL/$PROFILE/latest + + BUILD_XML_URL=$PKG_URL/$BUILD_XML + TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML + TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML + TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML + TMP_PRIMARYGZ=${TMP_PRIMARY}.gz + + Fetch $BUILD_XML_URL $TMP_BUILD + + Debug "fetch $BUILD_XML_URL to $TMP_BUILD" + + TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()" + Xpath_get $TARGET_XPATH $TMP_BUILD + TARGET_PATH=$XPATH_RESULT + TARGET_URL=$PKG_URL/$TARGET_PATH + + REPOMD_URL=$TARGET_URL/repodata/repomd.xml + PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)' + + Fetch $REPOMD_URL $TMP_REPOMD + + Debug "fetch $REPOMD_URL to $TMP_REPOMD" + + Xpath_get $PRIMARY_XPATH $TMP_REPOMD + PRIMARY_XML_PATH=$XPATH_RESULT + PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH + + Fetch $PRIMARY_URL $TMP_PRIMARYGZ + + Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ" + + gunzip $TMP_PRIMARYGZ + + Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY" +} + +fetch_tizen_pkgs() +{ + ARCH=$1 + PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)' + + PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())' + + for pkg in ${@:2} + do + Inform "Fetching... 
$pkg" + XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg} + XPATH=${XPATH/_ARCH_/$ARCH} + Xpath_get $XPATH $TMP_PRIMARY + PKG_PATH=$XPATH_RESULT + + XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg} + XPATH=${XPATH/_ARCH_/$ARCH} + Xpath_get $XPATH $TMP_PRIMARY + CHECKSUM=$XPATH_RESULT + + PKG_URL=$TARGET_URL/$PKG_PATH + PKG_FILE=$(basename $PKG_PATH) + PKG_PATH=$TMPDIR/$PKG_FILE + + Debug "Download $PKG_URL to $PKG_PATH" + Fetch $PKG_URL $PKG_PATH true + + echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null + if [ $? -ne 0 ]; then + Error "Fail to fetch $PKG_URL to $PKG_PATH" + Debug "Checksum = $CHECKSUM" + exit 1 + fi + done +} + +Inform "Initialize arm base" +fetch_tizen_pkgs_init standard base +Inform "fetch common packages" +fetch_tizen_pkgs armv7hl gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils +Inform "fetch coreclr packages" +fetch_tizen_pkgs armv7hl lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu +Inform "fetch corefx packages" +fetch_tizen_pkgs armv7hl libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel + +Inform "Initialize standard unified" +fetch_tizen_pkgs_init standard unified +Inform "fetch corefx packages" +fetch_tizen_pkgs armv7hl gssdp gssdp-devel tizen-release + diff --git a/eng/common/cross/arm/tizen/tizen.patch b/eng/common/cross/arm/tizen/tizen.patch new file mode 100644 index 0000000..fb12ade --- /dev/null +++ b/eng/common/cross/arm/tizen/tizen.patch @@ -0,0 +1,9 @@ +diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so +--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900 ++++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900 +@@ -2,4 +2,4 @@ + Use the shared library, but some functions are only in + the static library, so try that secondarily. */ + OUTPUT_FORMAT(elf32-littlearm) +-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-armhf.so.3 ) ) ++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-armhf.so.3 ) ) diff --git a/eng/common/cross/arm/trusty-lttng-2.4.patch b/eng/common/cross/arm/trusty-lttng-2.4.patch new file mode 100644 index 0000000..8e4dd7a --- /dev/null +++ b/eng/common/cross/arm/trusty-lttng-2.4.patch @@ -0,0 +1,71 @@ +From e72c9d7ead60e3317bd6d1fade995c07021c947b Mon Sep 17 00:00:00 2001 +From: Mathieu Desnoyers +Date: Thu, 7 May 2015 13:25:04 -0400 +Subject: [PATCH] Fix: building probe providers with C++ compiler + +Robert Daniels wrote: +> > I'm attempting to use lttng userspace tracing with a C++ application +> > on an ARM platform. I'm using GCC 4.8.4 on Linux 3.14 with the 2.6 +> > release of lttng. I've compiled lttng-modules, lttng-ust, and +> > lttng-tools and have been able to get a simple test working with C +> > code. When I attempt to run the hello.cxx test on my target it will +> > segfault. +> +> +> I spent a little time digging into this issue and finally discovered the +> cause of my segfault with ARM C++ tracepoints. +> +> There is a struct called 'lttng_event' in ust-events.h which contains an +> empty union 'u'. This was the cause of my issue. Under C, this empty union +> compiles to a zero byte member while under C++ it compiles to a one byte +> member, and in my case was four-byte aligned which caused my C++ code to +> have the 'cds_list_head node' offset incorrectly by four bytes. This lead +> to an incorrect linked list structure which caused my issue. 
+> +> Since this union is empty, I simply removed it from the struct and everything +> worked correctly. +> +> I don't know the history or purpose behind this empty union so I'd like to +> know if this is a safe fix. If it is I can submit a patch with the union +> removed. + +That's a very nice catch! + +We do not support building tracepoint probe provider with +g++ yet, as stated in lttng-ust(3): + +"- Note for C++ support: although an application instrumented with + tracepoints can be compiled with g++, tracepoint probes should be + compiled with gcc (only tested with gcc so far)." + +However, if it works fine with this fix, then I'm tempted to take it, +especially because removing the empty union does not appear to affect +the layout of struct lttng_event as seen from liblttng-ust, which must +be compiled with a C compiler, and from probe providers compiled with +a C compiler. So all we are changing is the layout of a probe provider +compiled with a C++ compiler, which is anyway buggy at the moment, +because it is not compatible with the layout expected by liblttng-ust +compiled with a C compiler. + +Reported-by: Robert Daniels +Signed-off-by: Mathieu Desnoyers +--- + include/lttng/ust-events.h | 2 -- + 1 file changed, 2 deletions(-) + +diff --git a/usr/include/lttng/ust-events.h b/usr/include/lttng/ust-events.h +index 328a875..3d7a274 100644 +--- a/usr/include/lttng/ust-events.h ++++ b/usr/include/lttng/ust-events.h +@@ -407,8 +407,6 @@ struct lttng_event { + void *_deprecated1; + struct lttng_ctx *ctx; + enum lttng_ust_instrumentation instrumentation; +- union { +- } u; + struct cds_list_head node; /* Event list in session */ + struct cds_list_head _deprecated2; + void *_deprecated3; +-- +2.7.4 + diff --git a/eng/common/cross/arm/trusty.patch b/eng/common/cross/arm/trusty.patch new file mode 100644 index 0000000..2f2972f --- /dev/null +++ b/eng/common/cross/arm/trusty.patch @@ -0,0 +1,97 @@ +diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h +--- a/usr/include/urcu/uatomic/generic.h 2014-03-28 06:04:42.000000000 +0900 ++++ b/usr/include/urcu/uatomic/generic.h 2017-02-13 10:35:21.189927116 +0900 +@@ -65,17 +65,17 @@ + switch (len) { + #ifdef UATOMIC_HAS_ATOMIC_BYTE + case 1: +- return __sync_val_compare_and_swap_1(addr, old, _new); ++ return __sync_val_compare_and_swap_1((uint8_t *) addr, old, _new); + #endif + #ifdef UATOMIC_HAS_ATOMIC_SHORT + case 2: +- return __sync_val_compare_and_swap_2(addr, old, _new); ++ return __sync_val_compare_and_swap_2((uint16_t *) addr, old, _new); + #endif + case 4: +- return __sync_val_compare_and_swap_4(addr, old, _new); ++ return __sync_val_compare_and_swap_4((uint32_t *) addr, old, _new); + #if (CAA_BITS_PER_LONG == 64) + case 8: +- return __sync_val_compare_and_swap_8(addr, old, _new); ++ return __sync_val_compare_and_swap_8((uint64_t *) addr, old, _new); + #endif + } + _uatomic_link_error(); +@@ -100,20 +100,20 @@ + switch (len) { + #ifdef UATOMIC_HAS_ATOMIC_BYTE + case 1: +- __sync_and_and_fetch_1(addr, val); ++ __sync_and_and_fetch_1((uint8_t *) addr, val); + return; + #endif + #ifdef UATOMIC_HAS_ATOMIC_SHORT + case 2: +- __sync_and_and_fetch_2(addr, val); ++ __sync_and_and_fetch_2((uint16_t *) addr, val); + return; + #endif + case 4: +- __sync_and_and_fetch_4(addr, val); ++ __sync_and_and_fetch_4((uint32_t *) addr, val); + return; + #if (CAA_BITS_PER_LONG == 64) + case 8: +- __sync_and_and_fetch_8(addr, val); ++ __sync_and_and_fetch_8((uint64_t *) addr, val); + return; + #endif + } +@@ -139,20 +139,20 @@ + 
switch (len) { + #ifdef UATOMIC_HAS_ATOMIC_BYTE + case 1: +- __sync_or_and_fetch_1(addr, val); ++ __sync_or_and_fetch_1((uint8_t *) addr, val); + return; + #endif + #ifdef UATOMIC_HAS_ATOMIC_SHORT + case 2: +- __sync_or_and_fetch_2(addr, val); ++ __sync_or_and_fetch_2((uint16_t *) addr, val); + return; + #endif + case 4: +- __sync_or_and_fetch_4(addr, val); ++ __sync_or_and_fetch_4((uint32_t *) addr, val); + return; + #if (CAA_BITS_PER_LONG == 64) + case 8: +- __sync_or_and_fetch_8(addr, val); ++ __sync_or_and_fetch_8((uint64_t *) addr, val); + return; + #endif + } +@@ -180,17 +180,17 @@ + switch (len) { + #ifdef UATOMIC_HAS_ATOMIC_BYTE + case 1: +- return __sync_add_and_fetch_1(addr, val); ++ return __sync_add_and_fetch_1((uint8_t *) addr, val); + #endif + #ifdef UATOMIC_HAS_ATOMIC_SHORT + case 2: +- return __sync_add_and_fetch_2(addr, val); ++ return __sync_add_and_fetch_2((uint16_t *) addr, val); + #endif + case 4: +- return __sync_add_and_fetch_4(addr, val); ++ return __sync_add_and_fetch_4((uint32_t *) addr, val); + #if (CAA_BITS_PER_LONG == 64) + case 8: +- return __sync_add_and_fetch_8(addr, val); ++ return __sync_add_and_fetch_8((uint64_t *) addr, val); + #endif + } + _uatomic_link_error(); diff --git a/eng/common/cross/arm64/sources.list.bionic b/eng/common/cross/arm64/sources.list.bionic new file mode 100644 index 0000000..2109557 --- /dev/null +++ b/eng/common/cross/arm64/sources.list.bionic @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse diff --git a/eng/common/cross/arm64/sources.list.buster b/eng/common/cross/arm64/sources.list.buster new file mode 100644 index 0000000..7194ac6 --- /dev/null +++ b/eng/common/cross/arm64/sources.list.buster @@ -0,0 +1,11 @@ +deb http://deb.debian.org/debian buster main +deb-src http://deb.debian.org/debian buster main + +deb http://deb.debian.org/debian-security/ buster/updates main +deb-src http://deb.debian.org/debian-security/ buster/updates main + +deb http://deb.debian.org/debian buster-updates main +deb-src http://deb.debian.org/debian buster-updates main + +deb http://deb.debian.org/debian buster-backports main contrib non-free +deb-src http://deb.debian.org/debian buster-backports main contrib non-free diff --git a/eng/common/cross/arm64/sources.list.focal b/eng/common/cross/arm64/sources.list.focal new file mode 100644 index 0000000..4de2600 --- /dev/null +++ b/eng/common/cross/arm64/sources.list.focal @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted + +deb 
http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse diff --git a/eng/common/cross/arm64/sources.list.jammy b/eng/common/cross/arm64/sources.list.jammy new file mode 100644 index 0000000..6bb0453 --- /dev/null +++ b/eng/common/cross/arm64/sources.list.jammy @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse diff --git a/eng/common/cross/arm64/sources.list.stretch b/eng/common/cross/arm64/sources.list.stretch new file mode 100644 index 0000000..0e12157 --- /dev/null +++ b/eng/common/cross/arm64/sources.list.stretch @@ -0,0 +1,12 @@ +deb http://deb.debian.org/debian stretch main +deb-src http://deb.debian.org/debian stretch main + +deb http://deb.debian.org/debian-security/ stretch/updates main +deb-src http://deb.debian.org/debian-security/ stretch/updates main + +deb http://deb.debian.org/debian stretch-updates main +deb-src http://deb.debian.org/debian stretch-updates main + +deb http://deb.debian.org/debian stretch-backports main contrib non-free +deb-src http://deb.debian.org/debian stretch-backports main contrib non-free + diff --git a/eng/common/cross/arm64/sources.list.trusty b/eng/common/cross/arm64/sources.list.trusty new file mode 100644 index 0000000..07d8f88 --- /dev/null +++ b/eng/common/cross/arm64/sources.list.trusty @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse \ No newline at end of file diff --git a/eng/common/cross/arm64/sources.list.xenial b/eng/common/cross/arm64/sources.list.xenial new file mode 100644 index 0000000..56fbb36 --- /dev/null +++ b/eng/common/cross/arm64/sources.list.xenial @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse 
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse diff --git a/eng/common/cross/arm64/sources.list.zesty b/eng/common/cross/arm64/sources.list.zesty new file mode 100644 index 0000000..ea2c14a --- /dev/null +++ b/eng/common/cross/arm64/sources.list.zesty @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse diff --git a/eng/common/cross/arm64/tizen-build-rootfs.sh b/eng/common/cross/arm64/tizen-build-rootfs.sh new file mode 100644 index 0000000..13bfddb --- /dev/null +++ b/eng/common/cross/arm64/tizen-build-rootfs.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash +set -e + +__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) +__TIZEN_CROSSDIR="$__CrossDir/tizen" + +if [[ -z "$ROOTFS_DIR" ]]; then + echo "ROOTFS_DIR is not defined." + exit 1; +fi + +TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp +mkdir -p $TIZEN_TMP_DIR + +# Download files +echo ">>Start downloading files" +VERBOSE=1 $__CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR +echo "<>Start constructing Tizen rootfs" +TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm` +cd $ROOTFS_DIR +for f in $TIZEN_RPM_FILES; do + rpm2cpio $f | cpio -idm --quiet +done +echo "<>Start configuring Tizen rootfs" +ln -sfn asm-arm64 ./usr/include/asm +patch -p1 < $__TIZEN_CROSSDIR/tizen.patch +echo "</dev/null; then + VERBOSE=0 +fi + +Log() +{ + if [ $VERBOSE -ge $1 ]; then + echo ${@:2} + fi +} + +Inform() +{ + Log 1 -e "\x1B[0;34m$@\x1B[m" +} + +Debug() +{ + Log 2 -e "\x1B[0;32m$@\x1B[m" +} + +Error() +{ + >&2 Log 0 -e "\x1B[0;31m$@\x1B[m" +} + +Fetch() +{ + URL=$1 + FILE=$2 + PROGRESS=$3 + if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then + CURL_OPT="--progress-bar" + else + CURL_OPT="--silent" + fi + curl $CURL_OPT $URL > $FILE +} + +hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; } +hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; } +hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; } + +TMPDIR=$1 +if [ ! 
-d $TMPDIR ]; then + TMPDIR=./tizen_tmp + Debug "Create temporary directory : $TMPDIR" + mkdir -p $TMPDIR +fi + +TIZEN_URL=http://download.tizen.org/snapshots/tizen/ +BUILD_XML=build.xml +REPOMD_XML=repomd.xml +PRIMARY_XML=primary.xml +TARGET_URL="http://__not_initialized" + +Xpath_get() +{ + XPATH_RESULT='' + XPATH=$1 + XML_FILE=$2 + RESULT=$(xmllint --xpath $XPATH $XML_FILE) + if [[ -z ${RESULT// } ]]; then + Error "Can not find target from $XML_FILE" + Debug "Xpath = $XPATH" + exit 1 + fi + XPATH_RESULT=$RESULT +} + +fetch_tizen_pkgs_init() +{ + TARGET=$1 + PROFILE=$2 + Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE" + + TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs + if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi + mkdir -p $TMP_PKG_DIR + + PKG_URL=$TIZEN_URL/$PROFILE/latest + + BUILD_XML_URL=$PKG_URL/$BUILD_XML + TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML + TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML + TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML + TMP_PRIMARYGZ=${TMP_PRIMARY}.gz + + Fetch $BUILD_XML_URL $TMP_BUILD + + Debug "fetch $BUILD_XML_URL to $TMP_BUILD" + + TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()" + Xpath_get $TARGET_XPATH $TMP_BUILD + TARGET_PATH=$XPATH_RESULT + TARGET_URL=$PKG_URL/$TARGET_PATH + + REPOMD_URL=$TARGET_URL/repodata/repomd.xml + PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)' + + Fetch $REPOMD_URL $TMP_REPOMD + + Debug "fetch $REPOMD_URL to $TMP_REPOMD" + + Xpath_get $PRIMARY_XPATH $TMP_REPOMD + PRIMARY_XML_PATH=$XPATH_RESULT + PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH + + Fetch $PRIMARY_URL $TMP_PRIMARYGZ + + Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ" + + gunzip $TMP_PRIMARYGZ + + Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY" +} + +fetch_tizen_pkgs() +{ + ARCH=$1 + PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)' + + PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())' + + for pkg in ${@:2} + do + Inform "Fetching... $pkg" + XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg} + XPATH=${XPATH/_ARCH_/$ARCH} + Xpath_get $XPATH $TMP_PRIMARY + PKG_PATH=$XPATH_RESULT + + XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg} + XPATH=${XPATH/_ARCH_/$ARCH} + Xpath_get $XPATH $TMP_PRIMARY + CHECKSUM=$XPATH_RESULT + + PKG_URL=$TARGET_URL/$PKG_PATH + PKG_FILE=$(basename $PKG_PATH) + PKG_PATH=$TMPDIR/$PKG_FILE + + Debug "Download $PKG_URL to $PKG_PATH" + Fetch $PKG_URL $PKG_PATH true + + echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null + if [ $? 
-ne 0 ]; then + Error "Fail to fetch $PKG_URL to $PKG_PATH" + Debug "Checksum = $CHECKSUM" + exit 1 + fi + done +} + +Inform "Initialize arm base" +fetch_tizen_pkgs_init standard base +Inform "fetch common packages" +fetch_tizen_pkgs aarch64 gcc glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils +Inform "fetch coreclr packages" +fetch_tizen_pkgs aarch64 lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu +Inform "fetch corefx packages" +fetch_tizen_pkgs aarch64 libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel + +Inform "Initialize standard unified" +fetch_tizen_pkgs_init standard unified +Inform "fetch corefx packages" +fetch_tizen_pkgs aarch64 gssdp gssdp-devel tizen-release + diff --git a/eng/common/cross/arm64/tizen/tizen.patch b/eng/common/cross/arm64/tizen/tizen.patch new file mode 100644 index 0000000..af7c8be --- /dev/null +++ b/eng/common/cross/arm64/tizen/tizen.patch @@ -0,0 +1,9 @@ +diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so +--- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900 ++++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900 +@@ -2,4 +2,4 @@ + Use the shared library, but some functions are only in + the static library, so try that secondarily. */ + OUTPUT_FORMAT(elf64-littleaarch64) +-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-aarch64.so.1 ) ) ++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-aarch64.so.1 ) ) diff --git a/eng/common/cross/armel/armel.jessie.patch b/eng/common/cross/armel/armel.jessie.patch new file mode 100644 index 0000000..2d26156 --- /dev/null +++ b/eng/common/cross/armel/armel.jessie.patch @@ -0,0 +1,43 @@ +diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h +--- a/usr/include/urcu/uatomic/generic.h 2014-10-22 15:00:58.000000000 -0700 ++++ b/usr/include/urcu/uatomic/generic.h 2020-10-30 21:38:28.550000000 -0700 +@@ -69,10 +69,10 @@ + #endif + #ifdef UATOMIC_HAS_ATOMIC_SHORT + case 2: +- return __sync_val_compare_and_swap_2(addr, old, _new); ++ return __sync_val_compare_and_swap_2((uint16_t*) addr, old, _new); + #endif + case 4: +- return __sync_val_compare_and_swap_4(addr, old, _new); ++ return __sync_val_compare_and_swap_4((uint32_t*) addr, old, _new); + #if (CAA_BITS_PER_LONG == 64) + case 8: + return __sync_val_compare_and_swap_8(addr, old, _new); +@@ -109,7 +109,7 @@ + return; + #endif + case 4: +- __sync_and_and_fetch_4(addr, val); ++ __sync_and_and_fetch_4((uint32_t*) addr, val); + return; + #if (CAA_BITS_PER_LONG == 64) + case 8: +@@ -148,7 +148,7 @@ + return; + #endif + case 4: +- __sync_or_and_fetch_4(addr, val); ++ __sync_or_and_fetch_4((uint32_t*) addr, val); + return; + #if (CAA_BITS_PER_LONG == 64) + case 8: +@@ -187,7 +187,7 @@ + return __sync_add_and_fetch_2(addr, val); + #endif + case 4: +- return __sync_add_and_fetch_4(addr, val); ++ return __sync_add_and_fetch_4((uint32_t*) addr, val); + #if (CAA_BITS_PER_LONG == 64) + case 8: + return __sync_add_and_fetch_8(addr, val); diff --git a/eng/common/cross/armel/sources.list.jessie b/eng/common/cross/armel/sources.list.jessie new file mode 100644 index 0000000..3d9c305 --- /dev/null +++ b/eng/common/cross/armel/sources.list.jessie @@ -0,0 +1,3 @@ +# Debian (jessie) # Stable +deb http://ftp.debian.org/debian/ jessie main contrib non-free +deb-src http://ftp.debian.org/debian/ jessie main contrib non-free diff --git 
a/eng/common/cross/armel/tizen-build-rootfs.sh b/eng/common/cross/armel/tizen-build-rootfs.sh new file mode 100644 index 0000000..9a4438a --- /dev/null +++ b/eng/common/cross/armel/tizen-build-rootfs.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash +set -e + +__ARM_SOFTFP_CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) +__TIZEN_CROSSDIR="$__ARM_SOFTFP_CrossDir/tizen" + +if [[ -z "$ROOTFS_DIR" ]]; then + echo "ROOTFS_DIR is not defined." + exit 1; +fi + +TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp +mkdir -p $TIZEN_TMP_DIR + +# Download files +echo ">>Start downloading files" +VERBOSE=1 $__ARM_SOFTFP_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR +echo "<>Start constructing Tizen rootfs" +TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm` +cd $ROOTFS_DIR +for f in $TIZEN_RPM_FILES; do + rpm2cpio $f | cpio -idm --quiet +done +echo "<>Start configuring Tizen rootfs" +ln -sfn asm-arm ./usr/include/asm +patch -p1 < $__TIZEN_CROSSDIR/tizen.patch +echo "</dev/null; then + VERBOSE=0 +fi + +Log() +{ + if [ $VERBOSE -ge $1 ]; then + echo ${@:2} + fi +} + +Inform() +{ + Log 1 -e "\x1B[0;34m$@\x1B[m" +} + +Debug() +{ + Log 2 -e "\x1B[0;32m$@\x1B[m" +} + +Error() +{ + >&2 Log 0 -e "\x1B[0;31m$@\x1B[m" +} + +Fetch() +{ + URL=$1 + FILE=$2 + PROGRESS=$3 + if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then + CURL_OPT="--progress-bar" + else + CURL_OPT="--silent" + fi + curl $CURL_OPT $URL > $FILE +} + +hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; } +hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; } +hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; } + +TMPDIR=$1 +if [ ! -d $TMPDIR ]; then + TMPDIR=./tizen_tmp + Debug "Create temporary directory : $TMPDIR" + mkdir -p $TMPDIR +fi + +TIZEN_URL=http://download.tizen.org/snapshots/tizen +BUILD_XML=build.xml +REPOMD_XML=repomd.xml +PRIMARY_XML=primary.xml +TARGET_URL="http://__not_initialized" + +Xpath_get() +{ + XPATH_RESULT='' + XPATH=$1 + XML_FILE=$2 + RESULT=$(xmllint --xpath $XPATH $XML_FILE) + if [[ -z ${RESULT// } ]]; then + Error "Can not find target from $XML_FILE" + Debug "Xpath = $XPATH" + exit 1 + fi + XPATH_RESULT=$RESULT +} + +fetch_tizen_pkgs_init() +{ + TARGET=$1 + PROFILE=$2 + Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE" + + TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs + if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi + mkdir -p $TMP_PKG_DIR + + PKG_URL=$TIZEN_URL/$PROFILE/latest + + BUILD_XML_URL=$PKG_URL/$BUILD_XML + TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML + TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML + TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML + TMP_PRIMARYGZ=${TMP_PRIMARY}.gz + + Fetch $BUILD_XML_URL $TMP_BUILD + + Debug "fetch $BUILD_XML_URL to $TMP_BUILD" + + TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()" + Xpath_get $TARGET_XPATH $TMP_BUILD + TARGET_PATH=$XPATH_RESULT + TARGET_URL=$PKG_URL/$TARGET_PATH + + REPOMD_URL=$TARGET_URL/repodata/repomd.xml + PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)' + + Fetch $REPOMD_URL $TMP_REPOMD + + Debug "fetch $REPOMD_URL to $TMP_REPOMD" + + Xpath_get $PRIMARY_XPATH $TMP_REPOMD + PRIMARY_XML_PATH=$XPATH_RESULT + PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH + + Fetch $PRIMARY_URL $TMP_PRIMARYGZ + + Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ" + + gunzip $TMP_PRIMARYGZ + + Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY" +} + +fetch_tizen_pkgs() +{ + ARCH=$1 + 
PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)' + + PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())' + + for pkg in ${@:2} + do + Inform "Fetching... $pkg" + XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg} + XPATH=${XPATH/_ARCH_/$ARCH} + Xpath_get $XPATH $TMP_PRIMARY + PKG_PATH=$XPATH_RESULT + + XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg} + XPATH=${XPATH/_ARCH_/$ARCH} + Xpath_get $XPATH $TMP_PRIMARY + CHECKSUM=$XPATH_RESULT + + PKG_URL=$TARGET_URL/$PKG_PATH + PKG_FILE=$(basename $PKG_PATH) + PKG_PATH=$TMPDIR/$PKG_FILE + + Debug "Download $PKG_URL to $PKG_PATH" + Fetch $PKG_URL $PKG_PATH true + + echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null + if [ $? -ne 0 ]; then + Error "Fail to fetch $PKG_URL to $PKG_PATH" + Debug "Checksum = $CHECKSUM" + exit 1 + fi + done +} + +Inform "Initialize arm base" +fetch_tizen_pkgs_init standard base +Inform "fetch common packages" +fetch_tizen_pkgs armv7l gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils +Inform "fetch coreclr packages" +fetch_tizen_pkgs armv7l lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu +Inform "fetch corefx packages" +fetch_tizen_pkgs armv7l libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel + +Inform "Initialize standard unified" +fetch_tizen_pkgs_init standard unified +Inform "fetch corefx packages" +fetch_tizen_pkgs armv7l gssdp gssdp-devel tizen-release + diff --git a/eng/common/cross/armel/tizen/tizen-dotnet.ks b/eng/common/cross/armel/tizen/tizen-dotnet.ks new file mode 100644 index 0000000..506d455 --- /dev/null +++ b/eng/common/cross/armel/tizen/tizen-dotnet.ks @@ -0,0 +1,50 @@ +lang en_US.UTF-8 +keyboard us +timezone --utc Asia/Seoul + +part / --fstype="ext4" --size=3500 --ondisk=mmcblk0 --label rootfs --fsoptions=defaults,noatime + +rootpw tizen +desktop --autologinuser=root +user --name root --groups audio,video --password 'tizen' + +repo --name=standard --baseurl=http://download.tizen.org/releases/milestone/tizen/unified/latest/repos/standard/packages/ --ssl_verify=no +repo --name=base --baseurl=http://download.tizen.org/releases/milestone/tizen/base/latest/repos/standard/packages/ --ssl_verify=no + +%packages +tar +gzip + +sed +grep +gawk +perl + +binutils +findutils +util-linux +lttng-ust +userspace-rcu +procps-ng +tzdata +ca-certificates + + +### Core FX +libicu +libunwind +iputils +zlib +krb5 +libcurl +libopenssl + +%end + +%post + +### Update /tmp privilege +chmod 777 /tmp +#################################### + +%end diff --git a/eng/common/cross/armel/tizen/tizen.patch b/eng/common/cross/armel/tizen/tizen.patch new file mode 100644 index 0000000..ca7c7c1 --- /dev/null +++ b/eng/common/cross/armel/tizen/tizen.patch @@ -0,0 +1,9 @@ +diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so +--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900 ++++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900 +@@ -2,4 +2,4 @@ + Use the shared library, but some functions are only in + the static library, so try that secondarily. 
*/ + OUTPUT_FORMAT(elf32-littlearm) +-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.3 ) ) ++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.3 ) ) diff --git a/eng/common/cross/armv6/sources.list.buster b/eng/common/cross/armv6/sources.list.buster new file mode 100644 index 0000000..f27fc4f --- /dev/null +++ b/eng/common/cross/armv6/sources.list.buster @@ -0,0 +1,2 @@ +deb http://raspbian.raspberrypi.org/raspbian/ buster main contrib non-free rpi +deb-src http://raspbian.raspberrypi.org/raspbian/ buster main contrib non-free rpi diff --git a/eng/common/cross/build-android-rootfs.sh b/eng/common/cross/build-android-rootfs.sh new file mode 100644 index 0000000..f163fb9 --- /dev/null +++ b/eng/common/cross/build-android-rootfs.sh @@ -0,0 +1,131 @@ +#!/usr/bin/env bash +set -e +__NDK_Version=r21 + +usage() +{ + echo "Creates a toolchain and sysroot used for cross-compiling for Android." + echo. + echo "Usage: $0 [BuildArch] [ApiLevel]" + echo. + echo "BuildArch is the target architecture of Android. Currently only arm64 is supported." + echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html" + echo. + echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior" + echo "by setting the TOOLCHAIN_DIR environment variable" + echo. + echo "By default, the NDK will be downloaded into the cross/android-rootfs/android-ndk-$__NDK_Version directory. If you already have an NDK installation," + echo "you can set the NDK_DIR environment variable to have this script use that installation of the NDK." + echo "By default, this script will generate a file, android_platform, in the root of the ROOTFS_DIR directory that contains the RID for the supported and tested Android build: android.28-arm64. This file is to replace '/etc/os-release', which is not available for Android." + exit 1 +} + +__ApiLevel=28 # The minimum platform for arm64 is API level 21 but the minimum version that support glob(3) is 28. See $ANDROID_NDK/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/include/glob.h +__BuildArch=arm64 +__AndroidArch=aarch64 +__AndroidToolchain=aarch64-linux-android + +for i in "$@" + do + lowerI="$(echo $i | tr "[:upper:]" "[:lower:]")" + case $lowerI in + -?|-h|--help) + usage + exit 1 + ;; + arm64) + __BuildArch=arm64 + __AndroidArch=aarch64 + __AndroidToolchain=aarch64-linux-android + ;; + arm) + __BuildArch=arm + __AndroidArch=arm + __AndroidToolchain=arm-linux-androideabi + ;; + *[0-9]) + __ApiLevel=$i + ;; + *) + __UnprocessedBuildArgs="$__UnprocessedBuildArgs $i" + ;; + esac +done + +# Obtain the location of the bash script to figure out where the root of the repo is. +__ScriptBaseDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +__CrossDir="$__ScriptBaseDir/../../../.tools/android-rootfs" + +if [[ ! 
-f "$__CrossDir" ]]; then + mkdir -p "$__CrossDir" +fi + +# Resolve absolute path to avoid `../` in build logs +__CrossDir="$( cd "$__CrossDir" && pwd )" + +__NDK_Dir="$__CrossDir/android-ndk-$__NDK_Version" +__lldb_Dir="$__CrossDir/lldb" +__ToolchainDir="$__CrossDir/android-ndk-$__NDK_Version" + +if [[ -n "$TOOLCHAIN_DIR" ]]; then + __ToolchainDir=$TOOLCHAIN_DIR +fi + +if [[ -n "$NDK_DIR" ]]; then + __NDK_Dir=$NDK_DIR +fi + +echo "Target API level: $__ApiLevel" +echo "Target architecture: $__BuildArch" +echo "NDK location: $__NDK_Dir" +echo "Target Toolchain location: $__ToolchainDir" + +# Download the NDK if required +if [ ! -d $__NDK_Dir ]; then + echo Downloading the NDK into $__NDK_Dir + mkdir -p $__NDK_Dir + wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux-x86_64.zip -O $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip + unzip -q $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip -d $__CrossDir +fi + +if [ ! -d $__lldb_Dir ]; then + mkdir -p $__lldb_Dir + echo Downloading LLDB into $__lldb_Dir + wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/lldb-2.3.3614996-linux-x86_64.zip -O $__CrossDir/lldb-2.3.3614996-linux-x86_64.zip + unzip -q $__CrossDir/lldb-2.3.3614996-linux-x86_64.zip -d $__lldb_Dir +fi + +echo "Download dependencies..." +__TmpDir=$__CrossDir/tmp/$__BuildArch/ +mkdir -p "$__TmpDir" + +# combined dependencies for coreclr, installer and libraries +__AndroidPackages="libicu" +__AndroidPackages+=" libandroid-glob" +__AndroidPackages+=" liblzma" +__AndroidPackages+=" krb5" +__AndroidPackages+=" openssl" + +for path in $(wget -qO- https://packages.termux.dev/termux-main-21/dists/stable/main/binary-$__AndroidArch/Packages |\ + grep -A15 "Package: \(${__AndroidPackages// /\\|}\)" | grep -v "static\|tool" | grep Filename); do + + if [[ "$path" != "Filename:" ]]; then + echo "Working on: $path" + wget -qO- https://packages.termux.dev/termux-main-21/$path | dpkg -x - "$__TmpDir" + fi +done + +cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/sysroot/usr/" + +# Generate platform file for build.sh script to assign to __DistroRid +echo "Generating platform file..." +echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/sysroot/android_platform + +echo "Now to build coreclr, libraries and installers; run:" +echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \ + --subsetCategory coreclr +echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \ + --subsetCategory libraries +echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \ + --subsetCategory installer diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh new file mode 100644 index 0000000..9caf9b0 --- /dev/null +++ b/eng/common/cross/build-rootfs.sh @@ -0,0 +1,648 @@ +#!/usr/bin/env bash + +set -e + +usage() +{ + echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] --rootfsdir ]" + echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86" + echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine" + echo " for alpine can be specified with version: alpineX.YY or alpineedge" + echo " for FreeBSD can be: freebsd12, freebsd13" + echo " for illumos can be: illumos" + echo " for Haiku can be: haiku." 
+ echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD" + echo "llvmx[.y] - optional, LLVM version for LLVM related packages." + echo "--skipunmount - optional, will skip the unmount of rootfs folder." + echo "--skipsigcheck - optional, will skip package signature checks (allowing untrusted packages)." + echo "--use-mirror - optional, use mirror URL to fetch resources, when available." + echo "--jobs N - optional, restrict to N jobs." + exit 1 +} + +__CodeName=xenial +__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) +__BuildArch=arm +__AlpineArch=armv7 +__FreeBSDArch=arm +__FreeBSDMachineArch=armv7 +__IllumosArch=arm7 +__HaikuArch=arm +__QEMUArch=arm +__UbuntuArch=armhf +__UbuntuRepo="http://ports.ubuntu.com/" +__LLDB_Package="liblldb-3.9-dev" +__SkipUnmount=0 + +# base development support +__UbuntuPackages="build-essential" + +__AlpinePackages="alpine-base" +__AlpinePackages+=" build-base" +__AlpinePackages+=" linux-headers" +__AlpinePackages+=" lldb-dev" +__AlpinePackages+=" python3" +__AlpinePackages+=" libedit" + +# symlinks fixer +__UbuntuPackages+=" symlinks" + +# runtime dependencies +__UbuntuPackages+=" libicu-dev" +__UbuntuPackages+=" liblttng-ust-dev" +__UbuntuPackages+=" libunwind8-dev" +__UbuntuPackages+=" libnuma-dev" + +__AlpinePackages+=" gettext-dev" +__AlpinePackages+=" icu-dev" +__AlpinePackages+=" libunwind-dev" +__AlpinePackages+=" lttng-ust-dev" +__AlpinePackages+=" compiler-rt" +__AlpinePackages+=" numactl-dev" + +# runtime libraries' dependencies +__UbuntuPackages+=" libcurl4-openssl-dev" +__UbuntuPackages+=" libkrb5-dev" +__UbuntuPackages+=" libssl-dev" +__UbuntuPackages+=" zlib1g-dev" + +__AlpinePackages+=" curl-dev" +__AlpinePackages+=" krb5-dev" +__AlpinePackages+=" openssl-dev" +__AlpinePackages+=" zlib-dev" + +__FreeBSDBase="12.4-RELEASE" +__FreeBSDPkg="1.17.0" +__FreeBSDABI="12" +__FreeBSDPackages="libunwind" +__FreeBSDPackages+=" icu" +__FreeBSDPackages+=" libinotify" +__FreeBSDPackages+=" openssl" +__FreeBSDPackages+=" krb5" +__FreeBSDPackages+=" terminfo-db" + +__IllumosPackages="icu" +__IllumosPackages+=" mit-krb5" +__IllumosPackages+=" openssl" +__IllumosPackages+=" zlib" + +__HaikuPackages="gcc_syslibs" +__HaikuPackages+=" gcc_syslibs_devel" +__HaikuPackages+=" gmp" +__HaikuPackages+=" gmp_devel" +__HaikuPackages+=" icu66" +__HaikuPackages+=" icu66_devel" +__HaikuPackages+=" krb5" +__HaikuPackages+=" krb5_devel" +__HaikuPackages+=" libiconv" +__HaikuPackages+=" libiconv_devel" +__HaikuPackages+=" llvm12_libunwind" +__HaikuPackages+=" llvm12_libunwind_devel" +__HaikuPackages+=" mpfr" +__HaikuPackages+=" mpfr_devel" +__HaikuPackages+=" openssl" +__HaikuPackages+=" openssl_devel" +__HaikuPackages+=" zlib" +__HaikuPackages+=" zlib_devel" + +# ML.NET dependencies +__UbuntuPackages+=" libomp5" +__UbuntuPackages+=" libomp-dev" + +# Taken from https://github.com/alpinelinux/alpine-chroot-install/blob/6d08f12a8a70dd9b9dc7d997c88aa7789cc03c42/alpine-chroot-install#L85-L133 +__AlpineKeys=' +4a6a0840:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1yHJxQgsHQREclQu4Ohe\nqxTxd1tHcNnvnQTu/UrTky8wWvgXT+jpveroeWWnzmsYlDI93eLI2ORakxb3gA2O\nQ0Ry4ws8vhaxLQGC74uQR5+/yYrLuTKydFzuPaS1dK19qJPXB8GMdmFOijnXX4SA\njixuHLe1WW7kZVtjL7nufvpXkWBGjsfrvskdNA/5MfxAeBbqPgaq0QMEfxMAn6/R\nL5kNepi/Vr4S39Xvf2DzWkTLEK8pcnjNkt9/aafhWqFVW7m3HCAII6h/qlQNQKSo\nGuH34Q8GsFG30izUENV9avY7hSLq7nggsvknlNBZtFUcmGoQrtx3FmyYsIC8/R+B\nywIDAQAB 
+5243ef4b:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvNijDxJ8kloskKQpJdx+\nmTMVFFUGDoDCbulnhZMJoKNkSuZOzBoFC94omYPtxnIcBdWBGnrm6ncbKRlR+6oy\nDO0W7c44uHKCFGFqBhDasdI4RCYP+fcIX/lyMh6MLbOxqS22TwSLhCVjTyJeeH7K\naA7vqk+QSsF4TGbYzQDDpg7+6aAcNzg6InNePaywA6hbT0JXbxnDWsB+2/LLSF2G\nmnhJlJrWB1WGjkz23ONIWk85W4S0XB/ewDefd4Ly/zyIciastA7Zqnh7p3Ody6Q0\nsS2MJzo7p3os1smGjUF158s6m/JbVh4DN6YIsxwl2OjDOz9R0OycfJSDaBVIGZzg\ncQIDAQAB +524d27bb:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr8s1q88XpuJWLCZALdKj\nlN8wg2ePB2T9aIcaxryYE/Jkmtu+ZQ5zKq6BT3y/udt5jAsMrhHTwroOjIsF9DeG\ne8Y3vjz+Hh4L8a7hZDaw8jy3CPag47L7nsZFwQOIo2Cl1SnzUc6/owoyjRU7ab0p\niWG5HK8IfiybRbZxnEbNAfT4R53hyI6z5FhyXGS2Ld8zCoU/R4E1P0CUuXKEN4p0\n64dyeUoOLXEWHjgKiU1mElIQj3k/IF02W89gDj285YgwqA49deLUM7QOd53QLnx+\nxrIrPv3A+eyXMFgexNwCKQU9ZdmWa00MjjHlegSGK8Y2NPnRoXhzqSP9T9i2HiXL\nVQIDAQAB +5261cecb:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwlzMkl7b5PBdfMzGdCT0\ncGloRr5xGgVmsdq5EtJvFkFAiN8Ac9MCFy/vAFmS8/7ZaGOXoCDWbYVLTLOO2qtX\nyHRl+7fJVh2N6qrDDFPmdgCi8NaE+3rITWXGrrQ1spJ0B6HIzTDNEjRKnD4xyg4j\ng01FMcJTU6E+V2JBY45CKN9dWr1JDM/nei/Pf0byBJlMp/mSSfjodykmz4Oe13xB\nCa1WTwgFykKYthoLGYrmo+LKIGpMoeEbY1kuUe04UiDe47l6Oggwnl+8XD1MeRWY\nsWgj8sF4dTcSfCMavK4zHRFFQbGp/YFJ/Ww6U9lA3Vq0wyEI6MCMQnoSMFwrbgZw\nwwIDAQAB +58199dcc:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3v8/ye/V/t5xf4JiXLXa\nhWFRozsnmn3hobON20GdmkrzKzO/eUqPOKTpg2GtvBhK30fu5oY5uN2ORiv2Y2ht\neLiZ9HVz3XP8Fm9frha60B7KNu66FO5P2o3i+E+DWTPqqPcCG6t4Znk2BypILcit\nwiPKTsgbBQR2qo/cO01eLLdt6oOzAaF94NH0656kvRewdo6HG4urbO46tCAizvCR\nCA7KGFMyad8WdKkTjxh8YLDLoOCtoZmXmQAiwfRe9pKXRH/XXGop8SYptLqyVVQ+\ntegOD9wRs2tOlgcLx4F/uMzHN7uoho6okBPiifRX+Pf38Vx+ozXh056tjmdZkCaV\naQIDAQAB +58cbb476:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoSPnuAGKtRIS5fEgYPXD\n8pSGvKAmIv3A08LBViDUe+YwhilSHbYXUEAcSH1KZvOo1WT1x2FNEPBEFEFU1Eyc\n+qGzbA03UFgBNvArurHQ5Z/GngGqE7IarSQFSoqewYRtFSfp+TL9CUNBvM0rT7vz\n2eMu3/wWG+CBmb92lkmyWwC1WSWFKO3x8w+Br2IFWvAZqHRt8oiG5QtYvcZL6jym\nY8T6sgdDlj+Y+wWaLHs9Fc+7vBuyK9C4O1ORdMPW15qVSl4Lc2Wu1QVwRiKnmA+c\nDsH/m7kDNRHM7TjWnuj+nrBOKAHzYquiu5iB3Qmx+0gwnrSVf27Arc3ozUmmJbLj\nzQIDAQAB +58e4f17d:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvBxJN9ErBgdRcPr5g4hV\nqyUSGZEKuvQliq2Z9SRHLh2J43+EdB6A+yzVvLnzcHVpBJ+BZ9RV30EM9guck9sh\nr+bryZcRHyjG2wiIEoduxF2a8KeWeQH7QlpwGhuobo1+gA8L0AGImiA6UP3LOirl\nI0G2+iaKZowME8/tydww4jx5vG132JCOScMjTalRsYZYJcjFbebQQolpqRaGB4iG\nWqhytWQGWuKiB1A22wjmIYf3t96l1Mp+FmM2URPxD1gk/BIBnX7ew+2gWppXOK9j\n1BJpo0/HaX5XoZ/uMqISAAtgHZAqq+g3IUPouxTphgYQRTRYpz2COw3NF43VYQrR\nbQIDAQAB +60ac2099:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwR4uJVtJOnOFGchnMW5Y\nj5/waBdG1u5BTMlH+iQMcV5+VgWhmpZHJCBz3ocD+0IGk2I68S5TDOHec/GSC0lv\n6R9o6F7h429GmgPgVKQsc8mPTPtbjJMuLLs4xKc+viCplXc0Nc0ZoHmCH4da6fCV\ntdpHQjVe6F9zjdquZ4RjV6R6JTiN9v924dGMAkbW/xXmamtz51FzondKC52Gh8Mo\n/oA0/T0KsCMCi7tb4QNQUYrf+Xcha9uus4ww1kWNZyfXJB87a2kORLiWMfs2IBBJ\nTmZ2Fnk0JnHDb8Oknxd9PvJPT0mvyT8DA+KIAPqNvOjUXP4bnjEHJcoCP9S5HkGC\nIQIDAQAB 
+6165ee59:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAutQkua2CAig4VFSJ7v54\nALyu/J1WB3oni7qwCZD3veURw7HxpNAj9hR+S5N/pNeZgubQvJWyaPuQDm7PTs1+\ntFGiYNfAsiibX6Rv0wci3M+z2XEVAeR9Vzg6v4qoofDyoTbovn2LztaNEjTkB+oK\ntlvpNhg1zhou0jDVYFniEXvzjckxswHVb8cT0OMTKHALyLPrPOJzVtM9C1ew2Nnc\n3848xLiApMu3NBk0JqfcS3Bo5Y2b1FRVBvdt+2gFoKZix1MnZdAEZ8xQzL/a0YS5\nHd0wj5+EEKHfOd3A75uPa/WQmA+o0cBFfrzm69QDcSJSwGpzWrD1ScH3AK8nWvoj\nv7e9gukK/9yl1b4fQQ00vttwJPSgm9EnfPHLAtgXkRloI27H6/PuLoNvSAMQwuCD\nhQRlyGLPBETKkHeodfLoULjhDi1K2gKJTMhtbnUcAA7nEphkMhPWkBpgFdrH+5z4\nLxy+3ek0cqcI7K68EtrffU8jtUj9LFTUC8dERaIBs7NgQ/LfDbDfGh9g6qVj1hZl\nk9aaIPTm/xsi8v3u+0qaq7KzIBc9s59JOoA8TlpOaYdVgSQhHHLBaahOuAigH+VI\nisbC9vmqsThF2QdDtQt37keuqoda2E6sL7PUvIyVXDRfwX7uMDjlzTxHTymvq2Ck\nhtBqojBnThmjJQFgZXocHG8CAwEAAQ== +61666e3f:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAlEyxkHggKCXC2Wf5Mzx4\nnZLFZvU2bgcA3exfNPO/g1YunKfQY+Jg4fr6tJUUTZ3XZUrhmLNWvpvSwDS19ZmC\nIXOu0+V94aNgnhMsk9rr59I8qcbsQGIBoHzuAl8NzZCgdbEXkiY90w1skUw8J57z\nqCsMBydAueMXuWqF5nGtYbi5vHwK42PffpiZ7G5Kjwn8nYMW5IZdL6ZnMEVJUWC9\nI4waeKg0yskczYDmZUEAtrn3laX9677ToCpiKrvmZYjlGl0BaGp3cxggP2xaDbUq\nqfFxWNgvUAb3pXD09JM6Mt6HSIJaFc9vQbrKB9KT515y763j5CC2KUsilszKi3mB\nHYe5PoebdjS7D1Oh+tRqfegU2IImzSwW3iwA7PJvefFuc/kNIijfS/gH/cAqAK6z\nbhdOtE/zc7TtqW2Wn5Y03jIZdtm12CxSxwgtCF1NPyEWyIxAQUX9ACb3M0FAZ61n\nfpPrvwTaIIxxZ01L3IzPLpbc44x/DhJIEU+iDt6IMTrHOphD9MCG4631eIdB0H1b\n6zbNX1CXTsafqHRFV9XmYYIeOMggmd90s3xIbEujA6HKNP/gwzO6CDJ+nHFDEqoF\nSkxRdTkEqjTjVKieURW7Swv7zpfu5PrsrrkyGnsRrBJJzXlm2FOOxnbI2iSL1B5F\nrO5kbUxFeZUIDq+7Yv4kLWcCAwEAAQ== +616a9724:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnC+bR4bHf/L6QdU4puhQ\ngl1MHePszRC38bzvVFDUJsmCaMCL2suCs2A2yxAgGb9pu9AJYLAmxQC4mM3jNqhg\n/E7yuaBbek3O02zN/ctvflJ250wZCy+z0ZGIp1ak6pu1j14IwHokl9j36zNfGtfv\nADVOcdpWITFFlPqwq1qt/H3UsKVmtiF3BNWWTeUEQwKvlU8ymxgS99yn0+4OPyNT\nL3EUeS+NQJtDS01unau0t7LnjUXn+XIneWny8bIYOQCuVR6s/gpIGuhBaUqwaJOw\n7jkJZYF2Ij7uPb4b5/R3vX2FfxxqEHqssFSg8FFUNTZz3qNZs0CRVyfA972g9WkJ\nhPfn31pQYil4QGRibCMIeU27YAEjXoqfJKEPh4UWMQsQLrEfdGfb8VgwrPbniGfU\nL3jKJR3VAafL9330iawzVQDlIlwGl6u77gEXMl9K0pfazunYhAp+BMP+9ot5ckK+\nosmrqj11qMESsAj083GeFdfV3pXEIwUytaB0AKEht9DbqUfiE/oeZ/LAXgySMtVC\nsbC4ESmgVeY2xSBIJdDyUap7FR49GGrw0W49NUv9gRgQtGGaNVQQO9oGL2PBC41P\niWF9GLoX30HIz1P8PF/cZvicSSPkQf2Z6TV+t0ebdGNS5DjapdnCrq8m9Z0pyKsQ\nuxAL2a7zX8l5i1CZh1ycUGsCAwEAAQ== +616abc23:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0MfCDrhODRCIxR9Dep1s\neXafh5CE5BrF4WbCgCsevyPIdvTeyIaW4vmO3bbG4VzhogDZju+R3IQYFuhoXP5v\nY+zYJGnwrgz3r5wYAvPnLEs1+dtDKYOgJXQj+wLJBW1mzRDL8FoRXOe5iRmn1EFS\nwZ1DoUvyu7/J5r0itKicZp3QKED6YoilXed+1vnS4Sk0mzN4smuMR9eO1mMCqNp9\n9KTfRDHTbakIHwasECCXCp50uXdoW6ig/xUAFanpm9LtK6jctNDbXDhQmgvAaLXZ\nLvFqoaYJ/CvWkyYCgL6qxvMvVmPoRv7OPcyni4xR/WgWa0MSaEWjgPx3+yj9fiMA\n1S02pFWFDOr5OUF/O4YhFJvUCOtVsUPPfA/Lj6faL0h5QI9mQhy5Zb9TTaS9jB6p\nLw7u0dJlrjFedk8KTJdFCcaGYHP6kNPnOxMylcB/5WcztXZVQD5WpCicGNBxCGMm\nW64SgrV7M07gQfL/32QLsdqPUf0i8hoVD8wfQ3EpbQzv6Fk1Cn90bZqZafg8XWGY\nwddhkXk7egrr23Djv37V2okjzdqoyLBYBxMz63qQzFoAVv5VoY2NDTbXYUYytOvG\nGJ1afYDRVWrExCech1mX5ZVUB1br6WM+psFLJFoBFl6mDmiYt0vMYBddKISsvwLl\nIJQkzDwtXzT2cSjoj3T5QekCAwEAAQ== 
+616ac3bc:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvaaoSLab+IluixwKV5Od\n0gib2YurjPatGIbn5Ov2DLUFYiebj2oJINXJSwUOO+4WcuHFEqiL/1rya+k5hLZt\nhnPL1tn6QD4rESznvGSasRCQNT2vS/oyZbTYJRyAtFkEYLlq0t3S3xBxxHWuvIf0\nqVxVNYpQWyM3N9RIeYBR/euXKJXileSHk/uq1I5wTC0XBIHWcthczGN0m9wBEiWS\n0m3cnPk4q0Ea8mUJ91Rqob19qETz6VbSPYYpZk3qOycjKosuwcuzoMpwU8KRiMFd\n5LHtX0Hx85ghGsWDVtS0c0+aJa4lOMGvJCAOvDfqvODv7gKlCXUpgumGpLdTmaZ8\n1RwqspAe3IqBcdKTqRD4m2mSg23nVx2FAY3cjFvZQtfooT7q1ItRV5RgH6FhQSl7\n+6YIMJ1Bf8AAlLdRLpg+doOUGcEn+pkDiHFgI8ylH1LKyFKw+eXaAml/7DaWZk1d\ndqggwhXOhc/UUZFQuQQ8A8zpA13PcbC05XxN2hyP93tCEtyynMLVPtrRwDnHxFKa\nqKzs3rMDXPSXRn3ZZTdKH3069ApkEjQdpcwUh+EmJ1Ve/5cdtzT6kKWCjKBFZP/s\n91MlRrX2BTRdHaU5QJkUheUtakwxuHrdah2F94lRmsnQlpPr2YseJu6sIE+Dnx4M\nCfhdVbQL2w54R645nlnohu8CAwEAAQ== +616adfeb:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq0BFD1D4lIxQcsqEpQzU\npNCYM3aP1V/fxxVdT4DWvSI53JHTwHQamKdMWtEXetWVbP5zSROniYKFXd/xrD9X\n0jiGHey3lEtylXRIPxe5s+wXoCmNLcJVnvTcDtwx/ne2NLHxp76lyc25At+6RgE6\nADjLVuoD7M4IFDkAsd8UQ8zM0Dww9SylIk/wgV3ZkifecvgUQRagrNUdUjR56EBZ\nraQrev4hhzOgwelT0kXCu3snbUuNY/lU53CoTzfBJ5UfEJ5pMw1ij6X0r5S9IVsy\nKLWH1hiO0NzU2c8ViUYCly4Fe9xMTFc6u2dy/dxf6FwERfGzETQxqZvSfrRX+GLj\n/QZAXiPg5178hT/m0Y3z5IGenIC/80Z9NCi+byF1WuJlzKjDcF/TU72zk0+PNM/H\nKuppf3JT4DyjiVzNC5YoWJT2QRMS9KLP5iKCSThwVceEEg5HfhQBRT9M6KIcFLSs\nmFjx9kNEEmc1E8hl5IR3+3Ry8G5/bTIIruz14jgeY9u5jhL8Vyyvo41jgt9sLHR1\n/J1TxKfkgksYev7PoX6/ZzJ1ksWKZY5NFoDXTNYUgzFUTOoEaOg3BAQKadb3Qbbq\nXIrxmPBdgrn9QI7NCgfnAY3Tb4EEjs3ON/BNyEhUENcXOH6I1NbcuBQ7g9P73kE4\nVORdoc8MdJ5eoKBpO8Ww8HECAwEAAQ== +616ae350:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAyduVzi1mWm+lYo2Tqt/0\nXkCIWrDNP1QBMVPrE0/ZlU2bCGSoo2Z9FHQKz/mTyMRlhNqTfhJ5qU3U9XlyGOPJ\npiM+b91g26pnpXJ2Q2kOypSgOMOPA4cQ42PkHBEqhuzssfj9t7x47ppS94bboh46\nxLSDRff/NAbtwTpvhStV3URYkxFG++cKGGa5MPXBrxIp+iZf9GnuxVdST5PGiVGP\nODL/b69sPJQNbJHVquqUTOh5Ry8uuD2WZuXfKf7/C0jC/ie9m2+0CttNu9tMciGM\nEyKG1/Xhk5iIWO43m4SrrT2WkFlcZ1z2JSf9Pjm4C2+HovYpihwwdM/OdP8Xmsnr\nDzVB4YvQiW+IHBjStHVuyiZWc+JsgEPJzisNY0Wyc/kNyNtqVKpX6dRhMLanLmy+\nf53cCSI05KPQAcGj6tdL+D60uKDkt+FsDa0BTAobZ31OsFVid0vCXtsbplNhW1IF\nHwsGXBTVcfXg44RLyL8Lk/2dQxDHNHzAUslJXzPxaHBLmt++2COa2EI1iWlvtznk\nOk9WP8SOAIj+xdqoiHcC4j72BOVVgiITIJNHrbppZCq6qPR+fgXmXa+sDcGh30m6\n9Wpbr28kLMSHiENCWTdsFij+NQTd5S47H7XTROHnalYDuF1RpS+DpQidT5tUimaT\nJZDr++FjKrnnijbyNF8b98UCAwEAAQ== +616db30d:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnpUpyWDWjlUk3smlWeA0\nlIMW+oJ38t92CRLHH3IqRhyECBRW0d0aRGtq7TY8PmxjjvBZrxTNDpJT6KUk4LRm\na6A6IuAI7QnNK8SJqM0DLzlpygd7GJf8ZL9SoHSH+gFsYF67Cpooz/YDqWrlN7Vw\ntO00s0B+eXy+PCXYU7VSfuWFGK8TGEv6HfGMALLjhqMManyvfp8hz3ubN1rK3c8C\nUS/ilRh1qckdbtPvoDPhSbTDmfU1g/EfRSIEXBrIMLg9ka/XB9PvWRrekrppnQzP\nhP9YE3x/wbFc5QqQWiRCYyQl/rgIMOXvIxhkfe8H5n1Et4VAorkpEAXdsfN8KSVv\nLSMazVlLp9GYq5SUpqYX3KnxdWBgN7BJoZ4sltsTpHQ/34SXWfu3UmyUveWj7wp0\nx9hwsPirVI00EEea9AbP7NM2rAyu6ukcm4m6ATd2DZJIViq2es6m60AE6SMCmrQF\nwmk4H/kdQgeAELVfGOm2VyJ3z69fQuywz7xu27S6zTKi05Qlnohxol4wVb6OB7qG\nLPRtK9ObgzRo/OPumyXqlzAi/Yvyd1ZQk8labZps3e16bQp8+pVPiumWioMFJDWV\nGZjCmyMSU8V6MB6njbgLHoyg2LCukCAeSjbPGGGYhnKLm1AKSoJh3IpZuqcKCk5C\n8CM1S15HxV78s9dFntEqIokCAwEAAQ== +' +__Keyring= +__SkipSigCheck=0 +__UseMirror=0 + +__UnprocessedBuildArgs= +while :; do + if [[ "$#" -le 0 ]]; then + break + fi + + lowerI="$(echo "$1" | tr "[:upper:]" "[:lower:]")" + case $lowerI in + -\?|-h|--help) + usage + exit 1 + ;; + arm) + __BuildArch=arm + __UbuntuArch=armhf + __AlpineArch=armv7 + __QEMUArch=arm + ;; + arm64) + __BuildArch=arm64 + __UbuntuArch=arm64 + __AlpineArch=aarch64 + __QEMUArch=aarch64 + __FreeBSDArch=arm64 + __FreeBSDMachineArch=aarch64 + 
;; + armel) + __BuildArch=armel + __UbuntuArch=armel + __UbuntuRepo="http://ftp.debian.org/debian/" + __CodeName=jessie + ;; + armv6) + __BuildArch=armv6 + __UbuntuArch=armhf + __QEMUArch=arm + __UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/" + __CodeName=buster + __LLDB_Package="liblldb-6.0-dev" + + if [[ -e "/usr/share/keyrings/raspbian-archive-keyring.gpg" ]]; then + __Keyring="--keyring /usr/share/keyrings/raspbian-archive-keyring.gpg" + fi + ;; + riscv64) + __BuildArch=riscv64 + __AlpineArch=riscv64 + __AlpinePackages="${__AlpinePackages// lldb-dev/}" + __QEMUArch=riscv64 + __UbuntuArch=riscv64 + __UbuntuRepo="http://deb.debian.org/debian-ports" + __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}" + unset __LLDB_Package + + if [[ -e "/usr/share/keyrings/debian-ports-archive-keyring.gpg" ]]; then + __Keyring="--keyring /usr/share/keyrings/debian-ports-archive-keyring.gpg --include=debian-ports-archive-keyring" + fi + ;; + ppc64le) + __BuildArch=ppc64le + __AlpineArch=ppc64le + __QEMUArch=ppc64le + __UbuntuArch=ppc64el + __UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/" + __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}" + __UbuntuPackages="${__UbuntuPackages// libomp-dev/}" + __UbuntuPackages="${__UbuntuPackages// libomp5/}" + unset __LLDB_Package + ;; + s390x) + __BuildArch=s390x + __AlpineArch=s390x + __QEMUArch=s390x + __UbuntuArch=s390x + __UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/" + __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}" + __UbuntuPackages="${__UbuntuPackages// libomp-dev/}" + __UbuntuPackages="${__UbuntuPackages// libomp5/}" + unset __LLDB_Package + ;; + x64) + __BuildArch=x64 + __AlpineArch=x86_64 + __UbuntuArch=amd64 + __FreeBSDArch=amd64 + __FreeBSDMachineArch=amd64 + __illumosArch=x86_64 + __HaikuArch=x86_64 + __UbuntuRepo="http://archive.ubuntu.com/ubuntu/" + ;; + x86) + __BuildArch=x86 + __UbuntuArch=i386 + __AlpineArch=x86 + __UbuntuRepo="http://archive.ubuntu.com/ubuntu/" + ;; + lldb*) + version="${lowerI/lldb/}" + parts=(${version//./ }) + + # for versions > 6.0, lldb has dropped the minor version + if [[ "${parts[0]}" -gt 6 ]]; then + version="${parts[0]}" + fi + + __LLDB_Package="liblldb-${version}-dev" + ;; + no-lldb) + unset __LLDB_Package + ;; + llvm*) + version="${lowerI/llvm/}" + parts=(${version//./ }) + __LLVM_MajorVersion="${parts[0]}" + __LLVM_MinorVersion="${parts[1]}" + + # for versions > 6.0, llvm has dropped the minor version + if [[ -z "$__LLVM_MinorVersion" && "$__LLVM_MajorVersion" -le 6 ]]; then + __LLVM_MinorVersion=0; + fi + ;; + xenial) # Ubuntu 16.04 + if [[ "$__CodeName" != "jessie" ]]; then + __CodeName=xenial + fi + ;; + zesty) # Ubuntu 17.04 + if [[ "$__CodeName" != "jessie" ]]; then + __CodeName=zesty + fi + ;; + bionic) # Ubuntu 18.04 + if [[ "$__CodeName" != "jessie" ]]; then + __CodeName=bionic + fi + ;; + focal) # Ubuntu 20.04 + if [[ "$__CodeName" != "jessie" ]]; then + __CodeName=focal + fi + ;; + jammy) # Ubuntu 22.04 + if [[ "$__CodeName" != "jessie" ]]; then + __CodeName=jammy + fi + ;; + jessie) # Debian 8 + __CodeName=jessie + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ftp.debian.org/debian/" + fi + ;; + stretch) # Debian 9 + __CodeName=stretch + __LLDB_Package="liblldb-6.0-dev" + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ftp.debian.org/debian/" + fi + ;; + buster) # Debian 10 + __CodeName=buster + __LLDB_Package="liblldb-6.0-dev" + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ftp.debian.org/debian/" + fi + ;; + bullseye) # Debian 11 + 
__CodeName=bullseye + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ftp.debian.org/debian/" + fi + ;; + sid) # Debian sid + __CodeName=sid + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ftp.debian.org/debian/" + fi + ;; + tizen) + __CodeName= + __UbuntuRepo= + __Tizen=tizen + ;; + alpine*) + __CodeName=alpine + __UbuntuRepo= + version="${lowerI/alpine/}" + + if [[ "$version" == "edge" ]]; then + __AlpineVersion=edge + else + parts=(${version//./ }) + __AlpineMajorVersion="${parts[0]}" + __AlpineMinoVersion="${parts[1]}" + __AlpineVersion="$__AlpineMajorVersion.$__AlpineMinoVersion" + fi + ;; + freebsd12) + __CodeName=freebsd + __SkipUnmount=1 + ;; + freebsd13) + __CodeName=freebsd + __FreeBSDBase="13.2-RELEASE" + __FreeBSDABI="13" + __SkipUnmount=1 + ;; + illumos) + __CodeName=illumos + __SkipUnmount=1 + ;; + haiku) + __CodeName=haiku + __SkipUnmount=1 + ;; + --skipunmount) + __SkipUnmount=1 + ;; + --skipsigcheck) + __SkipSigCheck=1 + ;; + --rootfsdir|-rootfsdir) + shift + __RootfsDir="$1" + ;; + --use-mirror) + __UseMirror=1 + ;; + --use-jobs) + shift + MAXJOBS=$1 + ;; + *) + __UnprocessedBuildArgs="$__UnprocessedBuildArgs $1" + ;; + esac + + shift +done + +case "$__AlpineVersion" in + 3.14) __AlpinePackages+=" llvm11-libs" ;; + 3.15) __AlpinePackages+=" llvm12-libs" ;; + 3.16) __AlpinePackages+=" llvm13-libs" ;; + 3.17) __AlpinePackages+=" llvm15-libs" ;; + edge) __AlpineLlvmLibsLookup=1 ;; + *) + if [[ "$__AlpineArch" =~ s390x|ppc64le ]]; then + __AlpineVersion=3.15 # minimum version that supports lldb-dev + __AlpinePackages+=" llvm12-libs" + elif [[ "$__AlpineArch" == "x86" ]]; then + __AlpineVersion=3.17 # minimum version that supports lldb-dev + __AlpinePackages+=" llvm15-libs" + elif [[ "$__AlpineArch" == "riscv64" ]]; then + __AlpineLlvmLibsLookup=1 + __AlpineVersion=edge # minimum version with APKINDEX.tar.gz (packages archive) + else + __AlpineVersion=3.13 # 3.13 to maximize compatibility + __AlpinePackages+=" llvm10-libs" + + if [[ "$__AlpineArch" == "armv7" ]]; then + __AlpinePackages="${__AlpinePackages//numactl-dev/}" + fi + fi +esac + +if [[ "$__AlpineVersion" =~ 3\.1[345] ]]; then + # compiler-rt--static was merged in compiler-rt package in alpine 3.16 + # for older versions, we need compiler-rt--static, so replace the name + __AlpinePackages="${__AlpinePackages/compiler-rt/compiler-rt-static}" +fi + +if [[ "$__BuildArch" == "armel" ]]; then + __LLDB_Package="lldb-3.5-dev" +fi + +if [[ "$__CodeName" == "xenial" && "$__UbuntuArch" == "armhf" ]]; then + # libnuma-dev is not available on armhf for xenial + __UbuntuPackages="${__UbuntuPackages//libnuma-dev/}" +fi + +__UbuntuPackages+=" ${__LLDB_Package:-}" + +if [[ -n "$__LLVM_MajorVersion" ]]; then + __UbuntuPackages+=" libclang-common-${__LLVM_MajorVersion}${__LLVM_MinorVersion:+.$__LLVM_MinorVersion}-dev" +fi + +if [[ -z "$__RootfsDir" && -n "$ROOTFS_DIR" ]]; then + __RootfsDir="$ROOTFS_DIR" +fi + +if [[ -z "$__RootfsDir" ]]; then + __RootfsDir="$__CrossDir/../../../.tools/rootfs/$__BuildArch" +fi + +if [[ -d "$__RootfsDir" ]]; then + if [[ "$__SkipUnmount" == "0" ]]; then + umount "$__RootfsDir"/* || true + fi + rm -rf "$__RootfsDir" +fi + +mkdir -p "$__RootfsDir" +__RootfsDir="$( cd "$__RootfsDir" && pwd )" + +if [[ "$__CodeName" == "alpine" ]]; then + __ApkToolsVersion=2.12.11 + __ApkToolsSHA512SUM=53e57b49230da07ef44ee0765b9592580308c407a8d4da7125550957bb72cb59638e04f8892a18b584451c8d841d1c7cb0f0ab680cc323a3015776affaa3be33 + __ApkToolsDir="$(mktemp -d)" + __ApkKeysDir="$(mktemp -d)" + + wget 
"https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic//v$__ApkToolsVersion/x86_64/apk.static" -P "$__ApkToolsDir" + echo "$__ApkToolsSHA512SUM $__ApkToolsDir/apk.static" | sha512sum -c + chmod +x "$__ApkToolsDir/apk.static" + + if [[ -f "/usr/bin/qemu-$__QEMUArch-static" ]]; then + mkdir -p "$__RootfsDir"/usr/bin + cp -v "/usr/bin/qemu-$__QEMUArch-static" "$__RootfsDir/usr/bin" + fi + + if [[ "$__AlpineVersion" == "edge" ]]; then + version=edge + else + version="v$__AlpineVersion" + fi + + for line in $__AlpineKeys; do + id="${line%%:*}" + content="${line#*:}" + + echo -e "-----BEGIN PUBLIC KEY-----\n$content\n-----END PUBLIC KEY-----" > "$__ApkKeysDir/alpine-devel@lists.alpinelinux.org-$id.rsa.pub" + done + + if [[ "$__SkipSigCheck" == "1" ]]; then + __ApkSignatureArg="--allow-untrusted" + else + __ApkSignatureArg="--keys-dir $__ApkKeysDir" + fi + + # initialize DB + "$__ApkToolsDir/apk.static" \ + -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ + -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ + -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" --initdb add + + if [[ "$__AlpineLlvmLibsLookup" == 1 ]]; then + __AlpinePackages+=" $("$__ApkToolsDir/apk.static" \ + -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ + -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ + -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \ + search 'llvm*-libs' | sort | tail -1 | sed 's/-[^-]*//2g')" + fi + + # install all packages in one go + "$__ApkToolsDir/apk.static" \ + -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ + -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ + -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \ + add $__AlpinePackages + + rm -r "$__ApkToolsDir" +elif [[ "$__CodeName" == "freebsd" ]]; then + mkdir -p "$__RootfsDir"/usr/local/etc + JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} + wget -O - "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version + echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf + echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf + mkdir -p "$__RootfsDir"/tmp + # get and build package manager + wget -O - "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf - + cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}" + # needed for install to succeed + mkdir -p "$__RootfsDir"/host/etc + ./autogen.sh && ./configure --prefix="$__RootfsDir"/host && make -j "$JOBS" && make install + rm -rf "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}" + # install packages we need. 
+ INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update + INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages +elif [[ "$__CodeName" == "illumos" ]]; then + mkdir "$__RootfsDir/tmp" + pushd "$__RootfsDir/tmp" + JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} + echo "Downloading sysroot." + wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf - + echo "Building binutils. Please wait.." + wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf - + mkdir build-binutils && cd build-binutils + ../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" + make -j "$JOBS" && make install && cd .. + echo "Building gcc. Please wait.." + wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf - + CFLAGS="-fPIC" + CXXFLAGS="-fPIC" + CXXFLAGS_FOR_TARGET="-fPIC" + CFLAGS_FOR_TARGET="-fPIC" + export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET + mkdir build-gcc && cd build-gcc + ../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \ + --with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \ + --disable-libquadmath-support --disable-shared --enable-tls + make -j "$JOBS" && make install && cd .. + BaseUrl=https://pkgsrc.smartos.org + if [[ "$__UseMirror" == 1 ]]; then + BaseUrl=https://pkgsrc.smartos.skylime.net + fi + BaseUrl="$BaseUrl/packages/SmartOS/trunk/${__illumosArch}/All" + echo "Downloading manifest" + wget "$BaseUrl" + echo "Downloading dependencies." + read -ra array <<<"$__IllumosPackages" + for package in "${array[@]}"; do + echo "Installing '$package'" + # find last occurrence of package in listing and extract its name + package="$(sed -En '/.*href="('"$package"'-[0-9].*).tgz".*/h;$!d;g;s//\1/p' All)" + echo "Resolved name '$package'" + wget "$BaseUrl"/"$package".tgz + ar -x "$package".tgz + tar --skip-old-files -xzf "$package".tmp.tg* -C "$__RootfsDir" 2>/dev/null + done + echo "Cleaning up temporary files." 
+ popd + rm -rf "$__RootfsDir"/{tmp,+*} + mkdir -p "$__RootfsDir"/usr/include/net + mkdir -p "$__RootfsDir"/usr/include/netpacket + wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h + wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h + wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h + wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h +elif [[ "$__CodeName" == "haiku" ]]; then + JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} + + echo "Building Haiku sysroot for $__HaikuArch" + mkdir -p "$__RootfsDir/tmp" + pushd "$__RootfsDir/tmp" + + mkdir "$__RootfsDir/tmp/download" + + echo "Downloading Haiku package tool" + git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 $__RootfsDir/tmp/script + wget -O "$__RootfsDir/tmp/download/hosttools.zip" $($__RootfsDir/tmp/script/fetch.sh --hosttools) + unzip -o "$__RootfsDir/tmp/download/hosttools.zip" -d "$__RootfsDir/tmp/bin" + + DepotBaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg" + HpkgBaseUrl="https://eu.hpkg.haiku-os.org/haiku/master/$__HaikuArch/current" + + # Download Haiku packages + echo "Downloading Haiku packages" + read -ra array <<<"$__HaikuPackages" + for package in "${array[@]}"; do + echo "Downloading $package..." + # API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60 + # The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598 + hpkgDownloadUrl="$(wget -qO- --post-data='{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \ + --header='Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')" + wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl" + done + for package in haiku haiku_devel; do + echo "Downloading $package..." + hpkgVersion="$(wget -qO- $HpkgBaseUrl | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')" + wget -P "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg" + done + + # Set up the sysroot + echo "Setting up sysroot and extracting required packages" + mkdir -p "$__RootfsDir/boot/system" + for file in "$__RootfsDir/tmp/download/"*.hpkg; do + echo "Extracting $file..." 
+ LD_LIBRARY_PATH="$__RootfsDir/tmp/bin" "$__RootfsDir/tmp/bin/package" extract -C "$__RootfsDir/boot/system" "$file" + done + + # Download buildtools + echo "Downloading Haiku buildtools" + wget -O "$__RootfsDir/tmp/download/buildtools.zip" $($__RootfsDir/tmp/script/fetch.sh --buildtools --arch=$__HaikuArch) + unzip -o "$__RootfsDir/tmp/download/buildtools.zip" -d "$__RootfsDir" + + # Cleaning up temporary files + echo "Cleaning up temporary files" + popd + rm -rf "$__RootfsDir/tmp" +elif [[ -n "$__CodeName" ]]; then + + if [[ "$__SkipSigCheck" == "0" ]]; then + __Keyring="$__Keyring --force-check-gpg" + fi + + debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo" + cp "$__CrossDir/$__BuildArch/sources.list.$__CodeName" "$__RootfsDir/etc/apt/sources.list" + chroot "$__RootfsDir" apt-get update + chroot "$__RootfsDir" apt-get -f -y install + chroot "$__RootfsDir" apt-get -y install $__UbuntuPackages + chroot "$__RootfsDir" symlinks -cr /usr + chroot "$__RootfsDir" apt-get clean + + if [[ "$__SkipUnmount" == "0" ]]; then + umount "$__RootfsDir"/* || true + fi + + if [[ "$__BuildArch" == "armel" && "$__CodeName" == "jessie" ]]; then + pushd "$__RootfsDir" + patch -p1 < "$__CrossDir/$__BuildArch/armel.jessie.patch" + popd + fi +elif [[ "$__Tizen" == "tizen" ]]; then + ROOTFS_DIR="$__RootfsDir" "$__CrossDir/tizen-build-rootfs.sh" "$__BuildArch" +else + echo "Unsupported target platform." + usage; + exit 1 +fi diff --git a/eng/common/cross/ppc64le/sources.list.bionic b/eng/common/cross/ppc64le/sources.list.bionic new file mode 100644 index 0000000..2109557 --- /dev/null +++ b/eng/common/cross/ppc64le/sources.list.bionic @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse diff --git a/eng/common/cross/riscv64/sources.list.sid b/eng/common/cross/riscv64/sources.list.sid new file mode 100644 index 0000000..65f730d --- /dev/null +++ b/eng/common/cross/riscv64/sources.list.sid @@ -0,0 +1 @@ +deb http://deb.debian.org/debian-ports sid main diff --git a/eng/common/cross/s390x/sources.list.bionic b/eng/common/cross/s390x/sources.list.bionic new file mode 100644 index 0000000..2109557 --- /dev/null +++ b/eng/common/cross/s390x/sources.list.bionic @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted 
universe multiverse diff --git a/eng/common/cross/tizen-build-rootfs.sh b/eng/common/cross/tizen-build-rootfs.sh new file mode 100644 index 0000000..ac84173 --- /dev/null +++ b/eng/common/cross/tizen-build-rootfs.sh @@ -0,0 +1,61 @@ +#!/usr/bin/env bash +set -e + +ARCH=$1 +LINK_ARCH=$ARCH + +case "$ARCH" in + arm) + TIZEN_ARCH="armv7hl" + ;; + armel) + TIZEN_ARCH="armv7l" + LINK_ARCH="arm" + ;; + arm64) + TIZEN_ARCH="aarch64" + ;; + x86) + TIZEN_ARCH="i686" + ;; + x64) + TIZEN_ARCH="x86_64" + LINK_ARCH="x86" + ;; + *) + echo "Unsupported architecture for tizen: $ARCH" + exit 1 +esac + +__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) +__TIZEN_CROSSDIR="$__CrossDir/${ARCH}/tizen" + +if [[ -z "$ROOTFS_DIR" ]]; then + echo "ROOTFS_DIR is not defined." + exit 1; +fi + +TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp +mkdir -p $TIZEN_TMP_DIR + +# Download files +echo ">>Start downloading files" +VERBOSE=1 $__CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR $TIZEN_ARCH +echo "<>Start constructing Tizen rootfs" +TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm` +cd $ROOTFS_DIR +for f in $TIZEN_RPM_FILES; do + rpm2cpio $f | cpio -idm --quiet +done +echo "<>Start configuring Tizen rootfs" +ln -sfn asm-${LINK_ARCH} ./usr/include/asm +patch -p1 < $__TIZEN_CROSSDIR/tizen.patch +echo "</dev/null; then + VERBOSE=0 +fi + +Log() +{ + if [ $VERBOSE -ge $1 ]; then + echo ${@:2} + fi +} + +Inform() +{ + Log 1 -e "\x1B[0;34m$@\x1B[m" +} + +Debug() +{ + Log 2 -e "\x1B[0;32m$@\x1B[m" +} + +Error() +{ + >&2 Log 0 -e "\x1B[0;31m$@\x1B[m" +} + +Fetch() +{ + URL=$1 + FILE=$2 + PROGRESS=$3 + if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then + CURL_OPT="--progress-bar" + else + CURL_OPT="--silent" + fi + curl $CURL_OPT $URL > $FILE +} + +hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; } +hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; } +hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; } + +TMPDIR=$1 +if [ ! 
-d $TMPDIR ]; then + TMPDIR=./tizen_tmp + Debug "Create temporary directory : $TMPDIR" + mkdir -p $TMPDIR +fi + +TIZEN_ARCH=$2 + +TIZEN_URL=http://download.tizen.org/snapshots/TIZEN/Tizen +BUILD_XML=build.xml +REPOMD_XML=repomd.xml +PRIMARY_XML=primary.xml +TARGET_URL="http://__not_initialized" + +Xpath_get() +{ + XPATH_RESULT='' + XPATH=$1 + XML_FILE=$2 + RESULT=$(xmllint --xpath $XPATH $XML_FILE) + if [[ -z ${RESULT// } ]]; then + Error "Can not find target from $XML_FILE" + Debug "Xpath = $XPATH" + exit 1 + fi + XPATH_RESULT=$RESULT +} + +fetch_tizen_pkgs_init() +{ + TARGET=$1 + PROFILE=$2 + Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE" + + TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs + if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi + mkdir -p $TMP_PKG_DIR + + PKG_URL=$TIZEN_URL/$PROFILE/latest + + BUILD_XML_URL=$PKG_URL/$BUILD_XML + TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML + TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML + TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML + TMP_PRIMARYGZ=${TMP_PRIMARY}.gz + + Fetch $BUILD_XML_URL $TMP_BUILD + + Debug "fetch $BUILD_XML_URL to $TMP_BUILD" + + TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()" + Xpath_get $TARGET_XPATH $TMP_BUILD + TARGET_PATH=$XPATH_RESULT + TARGET_URL=$PKG_URL/$TARGET_PATH + + REPOMD_URL=$TARGET_URL/repodata/repomd.xml + PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)' + + Fetch $REPOMD_URL $TMP_REPOMD + + Debug "fetch $REPOMD_URL to $TMP_REPOMD" + + Xpath_get $PRIMARY_XPATH $TMP_REPOMD + PRIMARY_XML_PATH=$XPATH_RESULT + PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH + + Fetch $PRIMARY_URL $TMP_PRIMARYGZ + + Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ" + + gunzip $TMP_PRIMARYGZ + + Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY" +} + +fetch_tizen_pkgs() +{ + ARCH=$1 + PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)' + + PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())' + + for pkg in ${@:2} + do + Inform "Fetching... $pkg" + XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg} + XPATH=${XPATH/_ARCH_/$ARCH} + Xpath_get $XPATH $TMP_PRIMARY + PKG_PATH=$XPATH_RESULT + + XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg} + XPATH=${XPATH/_ARCH_/$ARCH} + Xpath_get $XPATH $TMP_PRIMARY + CHECKSUM=$XPATH_RESULT + + PKG_URL=$TARGET_URL/$PKG_PATH + PKG_FILE=$(basename $PKG_PATH) + PKG_PATH=$TMPDIR/$PKG_FILE + + Debug "Download $PKG_URL to $PKG_PATH" + Fetch $PKG_URL $PKG_PATH true + + echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null + if [ $? 
-ne 0 ]; then + Error "Fail to fetch $PKG_URL to $PKG_PATH" + Debug "Checksum = $CHECKSUM" + exit 1 + fi + done +} + +Inform "Initialize ${TIZEN_ARCH} base" +fetch_tizen_pkgs_init standard Tizen-Base +Inform "fetch common packages" +fetch_tizen_pkgs ${TIZEN_ARCH} gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils +Inform "fetch coreclr packages" +fetch_tizen_pkgs ${TIZEN_ARCH} lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu +Inform "fetch corefx packages" +fetch_tizen_pkgs ${TIZEN_ARCH} libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel + +Inform "Initialize standard unified" +fetch_tizen_pkgs_init standard Tizen-Unified +Inform "fetch corefx packages" +fetch_tizen_pkgs ${TIZEN_ARCH} gssdp gssdp-devel tizen-release + diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake new file mode 100644 index 0000000..0998e87 --- /dev/null +++ b/eng/common/cross/toolchain.cmake @@ -0,0 +1,377 @@ +set(CROSS_ROOTFS $ENV{ROOTFS_DIR}) + +# reset platform variables (e.g. cmake 3.25 sets LINUX=1) +unset(LINUX) +unset(FREEBSD) +unset(ILLUMOS) +unset(ANDROID) +unset(TIZEN) +unset(HAIKU) + +set(TARGET_ARCH_NAME $ENV{TARGET_BUILD_ARCH}) +if(EXISTS ${CROSS_ROOTFS}/bin/freebsd-version) + set(CMAKE_SYSTEM_NAME FreeBSD) + set(FREEBSD 1) +elseif(EXISTS ${CROSS_ROOTFS}/usr/platform/i86pc) + set(CMAKE_SYSTEM_NAME SunOS) + set(ILLUMOS 1) +elseif(EXISTS ${CROSS_ROOTFS}/boot/system/develop/headers/config/HaikuConfig.h) + set(CMAKE_SYSTEM_NAME Haiku) + set(HAIKU 1) +else() + set(CMAKE_SYSTEM_NAME Linux) + set(LINUX 1) +endif() +set(CMAKE_SYSTEM_VERSION 1) + +if(EXISTS ${CROSS_ROOTFS}/etc/tizen-release) + set(TIZEN 1) +elseif(EXISTS ${CROSS_ROOTFS}/android_platform) + set(ANDROID 1) +endif() + +if(TARGET_ARCH_NAME STREQUAL "arm") + set(CMAKE_SYSTEM_PROCESSOR armv7l) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv7-alpine-linux-musleabihf) + set(TOOLCHAIN "armv7-alpine-linux-musleabihf") + elseif(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf) + set(TOOLCHAIN "armv6-alpine-linux-musleabihf") + else() + set(TOOLCHAIN "arm-linux-gnueabihf") + endif() + if(TIZEN) + set(TIZEN_TOOLCHAIN "armv7hl-tizen-linux-gnueabihf/9.2.0") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "arm64") + set(CMAKE_SYSTEM_PROCESSOR aarch64) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/aarch64-alpine-linux-musl) + set(TOOLCHAIN "aarch64-alpine-linux-musl") + elseif(LINUX) + set(TOOLCHAIN "aarch64-linux-gnu") + if(TIZEN) + set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu/9.2.0") + endif() + elseif(FREEBSD) + set(triple "aarch64-unknown-freebsd12") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "armel") + set(CMAKE_SYSTEM_PROCESSOR armv7l) + set(TOOLCHAIN "arm-linux-gnueabi") + if(TIZEN) + set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/9.2.0") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "armv6") + set(CMAKE_SYSTEM_PROCESSOR armv6l) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf) + set(TOOLCHAIN "armv6-alpine-linux-musleabihf") + else() + set(TOOLCHAIN "arm-linux-gnueabihf") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "ppc64le") + set(CMAKE_SYSTEM_PROCESSOR ppc64le) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/powerpc64le-alpine-linux-musl) + set(TOOLCHAIN "powerpc64le-alpine-linux-musl") + else() + set(TOOLCHAIN "powerpc64le-linux-gnu") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "riscv64") + 
set(CMAKE_SYSTEM_PROCESSOR riscv64) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/riscv64-alpine-linux-musl) + set(TOOLCHAIN "riscv64-alpine-linux-musl") + else() + set(TOOLCHAIN "riscv64-linux-gnu") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "s390x") + set(CMAKE_SYSTEM_PROCESSOR s390x) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/s390x-alpine-linux-musl) + set(TOOLCHAIN "s390x-alpine-linux-musl") + else() + set(TOOLCHAIN "s390x-linux-gnu") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "x64") + set(CMAKE_SYSTEM_PROCESSOR x86_64) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/x86_64-alpine-linux-musl) + set(TOOLCHAIN "x86_64-alpine-linux-musl") + elseif(LINUX) + set(TOOLCHAIN "x86_64-linux-gnu") + if(TIZEN) + set(TIZEN_TOOLCHAIN "x86_64-tizen-linux-gnu/9.2.0") + endif() + elseif(FREEBSD) + set(triple "x86_64-unknown-freebsd12") + elseif(ILLUMOS) + set(TOOLCHAIN "x86_64-illumos") + elseif(HAIKU) + set(TOOLCHAIN "x86_64-unknown-haiku") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "x86") + set(CMAKE_SYSTEM_PROCESSOR i686) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl) + set(TOOLCHAIN "i586-alpine-linux-musl") + else() + set(TOOLCHAIN "i686-linux-gnu") + endif() + if(TIZEN) + set(TIZEN_TOOLCHAIN "i586-tizen-linux-gnu/9.2.0") + endif() +else() + message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only arm, arm64, armel, armv6, ppc64le, riscv64, s390x, x64 and x86 are supported!") +endif() + +if(DEFINED ENV{TOOLCHAIN}) + set(TOOLCHAIN $ENV{TOOLCHAIN}) +endif() + +# Specify include paths +if(TIZEN) + if(TARGET_ARCH_NAME STREQUAL "arm") + include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/) + include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7hl-tizen-linux-gnueabihf) + endif() + if(TARGET_ARCH_NAME STREQUAL "armel") + include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/) + include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7l-tizen-linux-gnueabi) + endif() + if(TARGET_ARCH_NAME STREQUAL "arm64") + include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/) + include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/aarch64-tizen-linux-gnu) + endif() + if(TARGET_ARCH_NAME STREQUAL "x86") + include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/) + include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/i586-tizen-linux-gnu) + endif() + if(TARGET_ARCH_NAME STREQUAL "x64") + include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/) + include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/x86_64-tizen-linux-gnu) + endif() +endif() + +if(ANDROID) + if(TARGET_ARCH_NAME STREQUAL "arm") + set(ANDROID_ABI armeabi-v7a) + elseif(TARGET_ARCH_NAME STREQUAL "arm64") + set(ANDROID_ABI arm64-v8a) + endif() + + # extract platform number required by the NDK's toolchain + file(READ "${CROSS_ROOTFS}/android_platform" RID_FILE_CONTENTS) + string(REPLACE "RID=" "" ANDROID_RID "${RID_FILE_CONTENTS}") + string(REGEX REPLACE ".*\\.([0-9]+)-.*" "\\1" ANDROID_PLATFORM "${ANDROID_RID}") + + set(ANDROID_TOOLCHAIN clang) + set(FEATURE_EVENT_TRACE 0) # disable event trace as there is no lttng-ust package in termux repository + set(CMAKE_SYSTEM_LIBRARY_PATH "${CROSS_ROOTFS}/usr/lib") + set(CMAKE_SYSTEM_INCLUDE_PATH "${CROSS_ROOTFS}/usr/include") + + # include official NDK toolchain script + 
include(${CROSS_ROOTFS}/../build/cmake/android.toolchain.cmake) +elseif(FREEBSD) + # we cross-compile by instructing clang + set(CMAKE_C_COMPILER_TARGET ${triple}) + set(CMAKE_CXX_COMPILER_TARGET ${triple}) + set(CMAKE_ASM_COMPILER_TARGET ${triple}) + set(CMAKE_SYSROOT "${CROSS_ROOTFS}") + set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=lld") + set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fuse-ld=lld") + set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -fuse-ld=lld") +elseif(ILLUMOS) + set(CMAKE_SYSROOT "${CROSS_ROOTFS}") + + include_directories(SYSTEM ${CROSS_ROOTFS}/include) + + set(TOOLSET_PREFIX ${TOOLCHAIN}-) + function(locate_toolchain_exec exec var) + string(TOUPPER ${exec} EXEC_UPPERCASE) + if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "") + set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE) + return() + endif() + + find_program(EXEC_LOCATION_${exec} + NAMES + "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}" + "${TOOLSET_PREFIX}${exec}") + + if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND") + message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.") + endif() + set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE) + endfunction() + + set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}") + + locate_toolchain_exec(gcc CMAKE_C_COMPILER) + locate_toolchain_exec(g++ CMAKE_CXX_COMPILER) + + set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp") + set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") +elseif(HAIKU) + set(CMAKE_SYSROOT "${CROSS_ROOTFS}") + set(CMAKE_PROGRAM_PATH "${CMAKE_PROGRAM_PATH};${CROSS_ROOTFS}/cross-tools-x86_64/bin") + + set(TOOLSET_PREFIX ${TOOLCHAIN}-) + function(locate_toolchain_exec exec var) + string(TOUPPER ${exec} EXEC_UPPERCASE) + if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "") + set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE) + return() + endif() + + find_program(EXEC_LOCATION_${exec} + NAMES + "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}" + "${TOOLSET_PREFIX}${exec}") + + if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND") + message(FATAL_ERROR "Unable to find toolchain executable. 
Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.") + endif() + set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE) + endfunction() + + set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}") + + locate_toolchain_exec(gcc CMAKE_C_COMPILER) + locate_toolchain_exec(g++ CMAKE_CXX_COMPILER) + + set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp") + set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") + + # let CMake set up the correct search paths + include(Platform/Haiku) +else() + set(CMAKE_SYSROOT "${CROSS_ROOTFS}") + + set(CMAKE_C_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr") + set(CMAKE_CXX_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr") + set(CMAKE_ASM_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr") +endif() + +# Specify link flags + +function(add_toolchain_linker_flag Flag) + set(Config "${ARGV1}") + set(CONFIG_SUFFIX "") + if (NOT Config STREQUAL "") + set(CONFIG_SUFFIX "_${Config}") + endif() + set("CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}_INIT" "${CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}_INIT} ${Flag}" PARENT_SCOPE) + set("CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}_INIT" "${CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}_INIT} ${Flag}" PARENT_SCOPE) +endfunction() + +if(LINUX) + add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib/${TOOLCHAIN}") + add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}") +endif() + +if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$") + if(TIZEN) + add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + endif() +elseif(TARGET_ARCH_NAME MATCHES "^(arm64|x64)$") + if(TIZEN) + add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib64") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}") + + add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib64") + add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64") + add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "x86") + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl) + add_toolchain_linker_flag("--target=${TOOLCHAIN}") + add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}") + endif() + add_toolchain_linker_flag(-m32) + if(TIZEN) + add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + endif() +elseif(ILLUMOS) + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib/amd64") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/amd64/lib") +elseif(HAIKU) + add_toolchain_linker_flag("-lnetwork") + add_toolchain_linker_flag("-lroot") +endif() + +# Specify compile options + +if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|ppc64le|riscv64|s390x|x64|x86)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU) + set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN}) + set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN}) + set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN}) +endif() + +if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$") + add_compile_options(-mthumb) + 
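This toolchain file is driven entirely by the ROOTFS_DIR and TARGET_BUILD_ARCH environment variables, with an optional TOOLCHAIN override. As an illustration only, a cross build against a prepared rootfs might be launched as in the bash sketch below; the project layout and output directory are hypothetical and not part of this patch.

    #!/usr/bin/env bash
    # Hedged sketch only: cross-compiling an arbitrary CMake tree against a rootfs
    # produced by the build-rootfs scripts in this patch. Paths below are examples.
    set -euo pipefail

    export ROOTFS_DIR="$HOME/rootfs/arm64"   # rootfs prepared beforehand (example path)
    export TARGET_BUILD_ARCH="arm64"         # one of the arches handled by this file

    cmake -S . -B artifacts/build-arm64 \
        -DCMAKE_TOOLCHAIN_FILE="eng/common/cross/toolchain.cmake" \
        -DCMAKE_BUILD_TYPE=Release
    cmake --build artifacts/build-arm64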
if (NOT DEFINED CLR_ARM_FPU_TYPE) + set (CLR_ARM_FPU_TYPE vfpv3) + endif (NOT DEFINED CLR_ARM_FPU_TYPE) + + add_compile_options (-mfpu=${CLR_ARM_FPU_TYPE}) + if (NOT DEFINED CLR_ARM_FPU_CAPABILITY) + set (CLR_ARM_FPU_CAPABILITY 0x7) + endif (NOT DEFINED CLR_ARM_FPU_CAPABILITY) + + add_definitions (-DCLR_ARM_FPU_CAPABILITY=${CLR_ARM_FPU_CAPABILITY}) + + # persist variables across multiple try_compile passes + list(APPEND CMAKE_TRY_COMPILE_PLATFORM_VARIABLES CLR_ARM_FPU_TYPE CLR_ARM_FPU_CAPABILITY) + + if(TARGET_ARCH_NAME STREQUAL "armel") + add_compile_options(-mfloat-abi=softfp) + endif() +elseif(TARGET_ARCH_NAME STREQUAL "x86") + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl) + add_compile_options(--target=${TOOLCHAIN}) + endif() + add_compile_options(-m32) + add_compile_options(-Wno-error=unused-command-line-argument) +endif() + +if(TIZEN) + if(TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64|x86)$") + add_compile_options(-Wno-deprecated-declarations) # compile-time option + add_compile_options(-D__extern_always_inline=inline) # compile-time option + endif() +endif() + +# Set LLDB include and library paths for builds that need lldb. +if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$") + if(TARGET_ARCH_NAME STREQUAL "x86") + set(LLVM_CROSS_DIR "$ENV{LLVM_CROSS_HOME}") + else() # arm/armel case + set(LLVM_CROSS_DIR "$ENV{LLVM_ARM_HOME}") + endif() + if(LLVM_CROSS_DIR) + set(WITH_LLDB_LIBS "${LLVM_CROSS_DIR}/lib/" CACHE STRING "") + set(WITH_LLDB_INCLUDES "${LLVM_CROSS_DIR}/include" CACHE STRING "") + set(LLDB_H "${WITH_LLDB_INCLUDES}" CACHE STRING "") + set(LLDB "${LLVM_CROSS_DIR}/lib/liblldb.so" CACHE STRING "") + else() + if(TARGET_ARCH_NAME STREQUAL "x86") + set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/i386-linux-gnu" CACHE STRING "") + set(CHECK_LLVM_DIR "${CROSS_ROOTFS}/usr/lib/llvm-3.8/include") + if(EXISTS "${CHECK_LLVM_DIR}" AND IS_DIRECTORY "${CHECK_LLVM_DIR}") + set(WITH_LLDB_INCLUDES "${CHECK_LLVM_DIR}") + else() + set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include") + endif() + else() # arm/armel case + set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}" CACHE STRING "") + set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include" CACHE STRING "") + endif() + endif() +endif() + +set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) +set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) diff --git a/eng/common/cross/x86/sources.list.bionic b/eng/common/cross/x86/sources.list.bionic new file mode 100644 index 0000000..a71ccad --- /dev/null +++ b/eng/common/cross/x86/sources.list.bionic @@ -0,0 +1,11 @@ +deb http://archive.ubuntu.com/ubuntu/ bionic main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ bionic main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted +deb-src http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted + +deb http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse +deb-src http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse diff --git a/eng/common/cross/x86/sources.list.focal b/eng/common/cross/x86/sources.list.focal new file mode 100644 index 0000000..99d5731 --- /dev/null +++ b/eng/common/cross/x86/sources.list.focal @@ -0,0 +1,11 @@ +deb 
http://archive.ubuntu.com/ubuntu/ focal main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ focal main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ focal-updates main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ focal-updates main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ focal-backports main restricted +deb-src http://archive.ubuntu.com/ubuntu/ focal-backports main restricted + +deb http://archive.ubuntu.com/ubuntu/ focal-security main restricted universe multiverse +deb-src http://archive.ubuntu.com/ubuntu/ focal-security main restricted universe multiverse diff --git a/eng/common/cross/x86/sources.list.jammy b/eng/common/cross/x86/sources.list.jammy new file mode 100644 index 0000000..af1c1fe --- /dev/null +++ b/eng/common/cross/x86/sources.list.jammy @@ -0,0 +1,11 @@ +deb http://archive.ubuntu.com/ubuntu/ jammy main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ jammy main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ jammy-updates main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ jammy-updates main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ jammy-backports main restricted +deb-src http://archive.ubuntu.com/ubuntu/ jammy-backports main restricted + +deb http://archive.ubuntu.com/ubuntu/ jammy-security main restricted universe multiverse +deb-src http://archive.ubuntu.com/ubuntu/ jammy-security main restricted universe multiverse diff --git a/eng/common/cross/x86/sources.list.trusty b/eng/common/cross/x86/sources.list.trusty new file mode 100644 index 0000000..9b30854 --- /dev/null +++ b/eng/common/cross/x86/sources.list.trusty @@ -0,0 +1,11 @@ +deb http://archive.ubuntu.com/ubuntu/ trusty main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ trusty main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ trusty-updates main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ trusty-updates main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted +deb-src http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted + +deb http://archive.ubuntu.com/ubuntu/ trusty-security main restricted universe multiverse +deb-src http://archive.ubuntu.com/ubuntu/ trusty-security main restricted universe multiverse diff --git a/eng/common/cross/x86/sources.list.xenial b/eng/common/cross/x86/sources.list.xenial new file mode 100644 index 0000000..ad9c5a0 --- /dev/null +++ b/eng/common/cross/x86/sources.list.xenial @@ -0,0 +1,11 @@ +deb http://archive.ubuntu.com/ubuntu/ xenial main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ xenial main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted +deb-src http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted + +deb http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse +deb-src http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse diff --git a/eng/common/cross/x86/tizen-build-rootfs.sh b/eng/common/cross/x86/tizen-build-rootfs.sh new file mode 100644 index 0000000..f5f955d --- /dev/null +++ b/eng/common/cross/x86/tizen-build-rootfs.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash +set -e + +__X86_CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) 
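The x86 tizen-build-rootfs.sh that begins above delegates the downloads to tizen-fetch.sh which, like the arm/arm64 variant earlier in this patch, walks the snapshot repository metadata: build.xml names the binary repo, repomd.xml locates primary.xml.gz, and primary.xml supplies each package's href and sha256 checksum. The bash sketch below condenses that chain for illustration; the profile URL, package name, and architecture are placeholders rather than values taken from the scripts.

    #!/usr/bin/env bash
    # Hedged sketch of the metadata chain used by the tizen-fetch.sh scripts:
    # build.xml -> repomd.xml -> primary.xml.gz -> package href + sha256 checksum.
    # The base URL, package name and arch below are examples, not values from this patch.
    set -euo pipefail

    base='http://download.tizen.org/snapshots/tizen/unified/latest'
    pkg='zlib'
    arch='i686'

    # 1. build.xml names the binary repository for the "standard" build target.
    repo=$(curl -s "$base/build.xml" |
        xmllint --xpath 'string(//buildtarget[@name="standard"]/repo[@type="binary"])' -)

    # 2. repomd.xml points at the gzipped primary metadata.
    primary=$(curl -s "$base/$repo/repodata/repomd.xml" |
        xmllint --xpath 'string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)' -)
    curl -s "$base/$repo/$primary" | gunzip > primary.xml

    # 3. primary.xml yields the package location and its expected checksum.
    href=$(xmllint --xpath "string(//*[local-name()='package'][*[local-name()='name']='$pkg'][*[local-name()='arch']='$arch']/*[local-name()='location']/@href)" primary.xml)
    sum=$(xmllint --xpath "string(//*[local-name()='package'][*[local-name()='name']='$pkg'][*[local-name()='arch']='$arch']/*[local-name()='checksum'])" primary.xml)

    # 4. Download and verify, mirroring the `sha256sum -c` check in fetch_tizen_pkgs.
    curl -s -o "$(basename "$href")" "$base/$repo/$href"
    echo "$sum  $(basename "$href")" | sha256sum -c -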
+__TIZEN_CROSSDIR="$__X86_CrossDir/tizen"
+
+if [[ -z "$ROOTFS_DIR" ]]; then
+    echo "ROOTFS_DIR is not defined."
+    exit 1;
+fi
+
+TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
+mkdir -p $TIZEN_TMP_DIR
+
+# Download files
+echo ">>Start downloading files"
+VERBOSE=1 $__X86_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
+echo "<<Finish downloading files"
+
+echo ">>Start constructing Tizen rootfs"
+TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
+cd $ROOTFS_DIR
+for f in $TIZEN_RPM_FILES; do
+    rpm2cpio $f | cpio -idm --quiet
+done
+echo "<<Finish constructing Tizen rootfs"
+
+# Cleanup tmp
+rm -rf $TIZEN_TMP_DIR
+
+# Configure Tizen rootfs
+echo ">>Start configuring Tizen rootfs"
+ln -sfn asm-x86 ./usr/include/asm
+patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
+echo "<<Finish configuring Tizen rootfs"
diff --git a/eng/common/cross/x86/tizen-fetch.sh b/eng/common/cross/x86/tizen-fetch.sh
new file mode 100644
--- /dev/null
+++ b/eng/common/cross/x86/tizen-fetch.sh
@@ -0,0 +1,170 @@
+#!/usr/bin/env bash
+set -e
+
+if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then
+    VERBOSE=0
+fi
+
+Log()
+{
+    if [ $VERBOSE -ge $1 ]; then
+        echo ${@:2}
+    fi
+}
+
+Inform()
+{
+    Log 1 -e "\x1B[0;34m$@\x1B[m"
+}
+
+Debug()
+{
+    Log 2 -e "\x1B[0;32m$@\x1B[m"
+}
+
+Error()
+{
+    >&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
+}
+
+Fetch()
+{
+    URL=$1
+    FILE=$2
+    PROGRESS=$3
+    if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
+        CURL_OPT="--progress-bar"
+    else
+        CURL_OPT="--silent"
+    fi
+    curl $CURL_OPT $URL > $FILE
+}
+
+hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; }
+hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; }
+hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; }
+
+TMPDIR=$1
+if [ ! -d $TMPDIR ]; then
+    TMPDIR=./tizen_tmp
+    Debug "Create temporary directory : $TMPDIR"
+    mkdir -p $TMPDIR
+fi
+
+TIZEN_URL=http://download.tizen.org/snapshots/tizen
+BUILD_XML=build.xml
+REPOMD_XML=repomd.xml
+PRIMARY_XML=primary.xml
+TARGET_URL="http://__not_initialized"
+
+Xpath_get()
+{
+    XPATH_RESULT=''
+    XPATH=$1
+    XML_FILE=$2
+    RESULT=$(xmllint --xpath $XPATH $XML_FILE)
+    if [[ -z ${RESULT// } ]]; then
+        Error "Can not find target from $XML_FILE"
+        Debug "Xpath = $XPATH"
+        exit 1
+    fi
+    XPATH_RESULT=$RESULT
+}
+
+fetch_tizen_pkgs_init()
+{
+    TARGET=$1
+    PROFILE=$2
+    Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
+
+    TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
+    if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
+    mkdir -p $TMP_PKG_DIR
+
+    PKG_URL=$TIZEN_URL/$PROFILE/latest
+
+    BUILD_XML_URL=$PKG_URL/$BUILD_XML
+    TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
+    TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
+    TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
+    TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
+
+    Fetch $BUILD_XML_URL $TMP_BUILD
+
+    Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
+
+    TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
+    Xpath_get $TARGET_XPATH $TMP_BUILD
+    TARGET_PATH=$XPATH_RESULT
+    TARGET_URL=$PKG_URL/$TARGET_PATH
+
+    REPOMD_URL=$TARGET_URL/repodata/repomd.xml
+    PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
+
+    Fetch $REPOMD_URL $TMP_REPOMD
+
+    Debug "fetch $REPOMD_URL to $TMP_REPOMD"
+
+    Xpath_get $PRIMARY_XPATH $TMP_REPOMD
+    PRIMARY_XML_PATH=$XPATH_RESULT
+    PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
+
+    Fetch $PRIMARY_URL $TMP_PRIMARYGZ
+
+    Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
+
+    gunzip $TMP_PRIMARYGZ
+
+    Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
+}
+
+fetch_tizen_pkgs()
+{
+    ARCH=$1
+    PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
+
+    PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'
+
+    for pkg in ${@:2}
+    do
+        Inform
"Fetching... $pkg" + XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg} + XPATH=${XPATH/_ARCH_/$ARCH} + Xpath_get $XPATH $TMP_PRIMARY + PKG_PATH=$XPATH_RESULT + + XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg} + XPATH=${XPATH/_ARCH_/$ARCH} + Xpath_get $XPATH $TMP_PRIMARY + CHECKSUM=$XPATH_RESULT + + PKG_URL=$TARGET_URL/$PKG_PATH + PKG_FILE=$(basename $PKG_PATH) + PKG_PATH=$TMPDIR/$PKG_FILE + + Debug "Download $PKG_URL to $PKG_PATH" + Fetch $PKG_URL $PKG_PATH true + + echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null + if [ $? -ne 0 ]; then + Error "Fail to fetch $PKG_URL to $PKG_PATH" + Debug "Checksum = $CHECKSUM" + exit 1 + fi + done +} + +Inform "Initialize i686 base" +fetch_tizen_pkgs_init standard base +Inform "fetch common packages" +fetch_tizen_pkgs i686 gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils +Inform "fetch coreclr packages" +fetch_tizen_pkgs i686 lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu +Inform "fetch corefx packages" +fetch_tizen_pkgs i686 libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel + +Inform "Initialize standard unified" +fetch_tizen_pkgs_init standard unified +Inform "fetch corefx packages" +fetch_tizen_pkgs i686 gssdp gssdp-devel tizen-release + diff --git a/eng/common/cross/x86/tizen/tizen.patch b/eng/common/cross/x86/tizen/tizen.patch new file mode 100644 index 0000000..f4fe883 --- /dev/null +++ b/eng/common/cross/x86/tizen/tizen.patch @@ -0,0 +1,9 @@ +diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so +--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900 ++++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900 +@@ -2,4 +2,4 @@ + Use the shared library, but some functions are only in + the static library, so try that secondarily. */ + OUTPUT_FORMAT(elf32-i386) +-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.2 ) ) ++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.2 ) ) diff --git a/eng/common/darc-init.ps1 b/eng/common/darc-init.ps1 new file mode 100644 index 0000000..435e764 --- /dev/null +++ b/eng/common/darc-init.ps1 @@ -0,0 +1,47 @@ +param ( + $darcVersion = $null, + $versionEndpoint = 'https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16', + $verbosity = 'minimal', + $toolpath = $null +) + +. $PSScriptRoot\tools.ps1 + +function InstallDarcCli ($darcVersion, $toolpath) { + $darcCliPackageName = 'microsoft.dotnet.darc' + + $dotnetRoot = InitializeDotNetCli -install:$true + $dotnet = "$dotnetRoot\dotnet.exe" + $toolList = & "$dotnet" tool list -g + + if ($toolList -like "*$darcCliPackageName*") { + & "$dotnet" tool uninstall $darcCliPackageName -g + } + + # If the user didn't explicitly specify the darc version, + # query the Maestro API for the correct version of darc to install. + if (-not $darcVersion) { + $darcVersion = $(Invoke-WebRequest -Uri $versionEndpoint -UseBasicParsing).Content + } + + $arcadeServicesSource = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json' + + Write-Host "Installing Darc CLI version $darcVersion..." + Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.' 
+ if (-not $toolpath) { + Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity -g" + & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g + }else { + Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity --tool-path '$toolpath'" + & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath" + } +} + +try { + InstallDarcCli $darcVersion $toolpath +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'Darc' -Message $_ + ExitWithExitCode 1 +} \ No newline at end of file diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh new file mode 100644 index 0000000..84c1d0c --- /dev/null +++ b/eng/common/darc-init.sh @@ -0,0 +1,82 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" +darcVersion='' +versionEndpoint='https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16' +verbosity='minimal' + +while [[ $# > 0 ]]; do + opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" + case "$opt" in + --darcversion) + darcVersion=$2 + shift + ;; + --versionendpoint) + versionEndpoint=$2 + shift + ;; + --verbosity) + verbosity=$2 + shift + ;; + --toolpath) + toolpath=$2 + shift + ;; + *) + echo "Invalid argument: $1" + usage + exit 1 + ;; + esac + + shift +done + +# resolve $source until the file is no longer a symlink +while [[ -h "$source" ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +. "$scriptroot/tools.sh" + +if [ -z "$darcVersion" ]; then + darcVersion=$(curl -X GET "$versionEndpoint" -H "accept: text/plain") +fi + +function InstallDarcCli { + local darc_cli_package_name="microsoft.dotnet.darc" + + InitializeDotNetCli true + local dotnet_root=$_InitializeDotNetCli + + if [ -z "$toolpath" ]; then + local tool_list=$($dotnet_root/dotnet tool list -g) + if [[ $tool_list = *$darc_cli_package_name* ]]; then + echo $($dotnet_root/dotnet tool uninstall $darc_cli_package_name -g) + fi + else + local tool_list=$($dotnet_root/dotnet tool list --tool-path "$toolpath") + if [[ $tool_list = *$darc_cli_package_name* ]]; then + echo $($dotnet_root/dotnet tool uninstall $darc_cli_package_name --tool-path "$toolpath") + fi + fi + + local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json" + + echo "Installing Darc CLI version $darcVersion..." + echo "You may need to restart your command shell if this is the first dotnet tool you have installed." 
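When no --darcversion is passed, the script resolves one from the Maestro endpoint queried above and then runs the install below. As a usage illustration only, with an arbitrary example for the tool directory:

    #!/usr/bin/env bash
    # Hedged usage sketch; the tool path is an arbitrary example.
    set -euo pipefail

    # Let darc-init.sh resolve the darc version from the Maestro endpoint and
    # install it into a private tool directory rather than the global store.
    ./eng/common/darc-init.sh --verbosity minimal --toolpath "$HOME/.tools/darc"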
+ if [ -z "$toolpath" ]; then + echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g) + else + echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath") + fi +} + +InstallDarcCli diff --git a/eng/common/dotnet-install-scripts/dotnet-install.ps1 b/eng/common/dotnet-install-scripts/dotnet-install.ps1 new file mode 100644 index 0000000..f63b533 --- /dev/null +++ b/eng/common/dotnet-install-scripts/dotnet-install.ps1 @@ -0,0 +1,774 @@ +# +# Copyright (c) .NET Foundation and contributors. All rights reserved. +# Licensed under the MIT license. See LICENSE file in the project root for full license information. +# + +# Copied from https://dot.net/v1/dotnet-install.ps1 on 8/26/2020 + +<# +.SYNOPSIS + Installs dotnet cli +.DESCRIPTION + Installs dotnet cli. If dotnet installation already exists in the given directory + it will update it only if the requested version differs from the one already installed. +.PARAMETER Channel + Default: LTS + Download from the Channel specified. Possible values: + - Current - most current release + - LTS - most current supported release + - 2-part version in a format A.B - represents a specific release + examples: 2.0, 1.0 + - Branch name + examples: release/2.0.0, Master + Note: The version parameter overrides the channel parameter. +.PARAMETER Version + Default: latest + Represents a build version on specific channel. Possible values: + - latest - most latest build on specific channel + - coherent - most latest coherent build on specific channel + coherent applies only to SDK downloads + - 3-part version in a format A.B.C - represents specific version of build + examples: 2.0.0-preview2-006120, 1.1.0 +.PARAMETER InstallDir + Default: %LocalAppData%\Microsoft\dotnet + Path to where to install dotnet. Note that binaries will be placed directly in a given directory. +.PARAMETER Architecture + Default: - this value represents currently running OS architecture + Architecture of dotnet binaries to be installed. + Possible values are: , amd64, x64, x86, arm64, arm +.PARAMETER SharedRuntime + This parameter is obsolete and may be removed in a future version of this script. + The recommended alternative is '-Runtime dotnet'. + Installs just the shared runtime bits, not the entire SDK. +.PARAMETER Runtime + Installs just a shared runtime, not the entire SDK. + Possible values: + - dotnet - the Microsoft.NETCore.App shared runtime + - aspnetcore - the Microsoft.AspNetCore.App shared runtime + - windowsdesktop - the Microsoft.WindowsDesktop.App shared runtime +.PARAMETER DryRun + If set it will not perform installation but instead display what command line to use to consistently install + currently requested version of dotnet cli. In example if you specify version 'latest' it will display a link + with specific version so that this command can be used deterministicly in a build script. + It also displays binaries location if you prefer to install or download it yourself. +.PARAMETER NoPath + By default this script will set environment variable PATH for the current process to the binaries folder inside installation folder. + If set it will display binaries location but not set any environment variable. +.PARAMETER Verbose + Displays diagnostics information. +.PARAMETER AzureFeed + Default: https://dotnetcli.azureedge.net/dotnet + This parameter typically is not changed by the user. 
+ It allows changing the URL for the Azure feed used by this installer. +.PARAMETER UncachedFeed + This parameter typically is not changed by the user. + It allows changing the URL for the Uncached feed used by this installer. +.PARAMETER FeedCredential + Used as a query string to append to the Azure feed. + It allows changing the URL to use non-public blob storage accounts. +.PARAMETER ProxyAddress + If set, the installer will use the proxy when making web requests +.PARAMETER ProxyUseDefaultCredentials + Default: false + Use default credentials, when using proxy address. +.PARAMETER ProxyBypassList + If set with ProxyAddress, will provide the list of comma separated urls that will bypass the proxy +.PARAMETER SkipNonVersionedFiles + Default: false + Skips installing non-versioned files if they already exist, such as dotnet.exe. +.PARAMETER NoCdn + Disable downloading from the Azure CDN, and use the uncached feed directly. +.PARAMETER JSonFile + Determines the SDK version from a user specified global.json file + Note: global.json must have a value for 'SDK:Version' +#> +[cmdletbinding()] +param( + [string]$Channel="LTS", + [string]$Version="Latest", + [string]$JSonFile, + [string]$InstallDir="", + [string]$Architecture="", + [ValidateSet("dotnet", "aspnetcore", "windowsdesktop", IgnoreCase = $false)] + [string]$Runtime, + [Obsolete("This parameter may be removed in a future version of this script. The recommended alternative is '-Runtime dotnet'.")] + [switch]$SharedRuntime, + [switch]$DryRun, + [switch]$NoPath, + [string]$AzureFeed="https://dotnetcli.azureedge.net/dotnet", + [string]$UncachedFeed="https://dotnetcli.blob.core.windows.net/dotnet", + [string]$FeedCredential, + [string]$ProxyAddress, + [switch]$ProxyUseDefaultCredentials, + [string[]]$ProxyBypassList=@(), + [switch]$SkipNonVersionedFiles, + [switch]$NoCdn +) + +Set-StrictMode -Version Latest +$ErrorActionPreference="Stop" +$ProgressPreference="SilentlyContinue" + +if ($NoCdn) { + $AzureFeed = $UncachedFeed +} + +$BinFolderRelativePath="" + +if ($SharedRuntime -and (-not $Runtime)) { + $Runtime = "dotnet" +} + +# example path with regex: shared/1.0.0-beta-12345/somepath +$VersionRegEx="/\d+\.\d+[^/]+/" +$OverrideNonVersionedFiles = !$SkipNonVersionedFiles + +function Say($str) { + try + { + Write-Host "dotnet-install: $str" + } + catch + { + # Some platforms cannot utilize Write-Host (Azure Functions, for instance). Fall back to Write-Output + Write-Output "dotnet-install: $str" + } +} + +function Say-Verbose($str) { + try + { + Write-Verbose "dotnet-install: $str" + } + catch + { + # Some platforms cannot utilize Write-Verbose (Azure Functions, for instance). Fall back to Write-Output + Write-Output "dotnet-install: $str" + } +} + +function Say-Invocation($Invocation) { + $command = $Invocation.MyCommand; + $args = (($Invocation.BoundParameters.Keys | foreach { "-$_ `"$($Invocation.BoundParameters[$_])`"" }) -join " ") + Say-Verbose "$command $args" +} + +function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [int]$SecondsBetweenAttempts = 1) { + $Attempts = 0 + + while ($true) { + try { + return $ScriptBlock.Invoke() + } + catch { + $Attempts++ + if ($Attempts -lt $MaxAttempts) { + Start-Sleep $SecondsBetweenAttempts + } + else { + throw + } + } + } +} + +function Get-Machine-Architecture() { + Say-Invocation $MyInvocation + + # On PS x86, PROCESSOR_ARCHITECTURE reports x86 even on x64 systems. + # To get the correct architecture, we need to use PROCESSOR_ARCHITEW6432. 
+ # PS x64 doesn't define this, so we fall back to PROCESSOR_ARCHITECTURE. + # Possible values: amd64, x64, x86, arm64, arm + + if( $ENV:PROCESSOR_ARCHITEW6432 -ne $null ) + { + return $ENV:PROCESSOR_ARCHITEW6432 + } + + return $ENV:PROCESSOR_ARCHITECTURE +} + +function Get-CLIArchitecture-From-Architecture([string]$Architecture) { + Say-Invocation $MyInvocation + + switch ($Architecture.ToLower()) { + { $_ -eq "" } { return Get-CLIArchitecture-From-Architecture $(Get-Machine-Architecture) } + { ($_ -eq "amd64") -or ($_ -eq "x64") } { return "x64" } + { $_ -eq "x86" } { return "x86" } + { $_ -eq "arm" } { return "arm" } + { $_ -eq "arm64" } { return "arm64" } + default { throw "Architecture not supported. If you think this is a bug, report it at https://github.com/dotnet/sdk/issues" } + } +} + +# The version text returned from the feeds is a 1-line or 2-line string: +# For the SDK and the dotnet runtime (2 lines): +# Line 1: # commit_hash +# Line 2: # 4-part version +# For the aspnetcore runtime (1 line): +# Line 1: # 4-part version +function Get-Version-Info-From-Version-Text([string]$VersionText) { + Say-Invocation $MyInvocation + + $Data = -split $VersionText + + $VersionInfo = @{ + CommitHash = $(if ($Data.Count -gt 1) { $Data[0] }) + Version = $Data[-1] # last line is always the version number. + } + return $VersionInfo +} + +function Load-Assembly([string] $Assembly) { + try { + Add-Type -Assembly $Assembly | Out-Null + } + catch { + # On Nano Server, Powershell Core Edition is used. Add-Type is unable to resolve base class assemblies because they are not GAC'd. + # Loading the base class assemblies is not unnecessary as the types will automatically get resolved. + } +} + +function GetHTTPResponse([Uri] $Uri) +{ + Invoke-With-Retry( + { + + $HttpClient = $null + + try { + # HttpClient is used vs Invoke-WebRequest in order to support Nano Server which doesn't support the Invoke-WebRequest cmdlet. + Load-Assembly -Assembly System.Net.Http + + if(-not $ProxyAddress) { + try { + # Despite no proxy being explicitly specified, we may still be behind a default proxy + $DefaultProxy = [System.Net.WebRequest]::DefaultWebProxy; + if($DefaultProxy -and (-not $DefaultProxy.IsBypassed($Uri))) { + $ProxyAddress = $DefaultProxy.GetProxy($Uri).OriginalString + $ProxyUseDefaultCredentials = $true + } + } catch { + # Eat the exception and move forward as the above code is an attempt + # at resolving the DefaultProxy that may not have been a problem. + $ProxyAddress = $null + Say-Verbose("Exception ignored: $_.Exception.Message - moving forward...") + } + } + + if($ProxyAddress) { + $HttpClientHandler = New-Object System.Net.Http.HttpClientHandler + $HttpClientHandler.Proxy = New-Object System.Net.WebProxy -Property @{ + Address=$ProxyAddress; + UseDefaultCredentials=$ProxyUseDefaultCredentials; + BypassList = $ProxyBypassList; + } + $HttpClient = New-Object System.Net.Http.HttpClient -ArgumentList $HttpClientHandler + } + else { + + $HttpClient = New-Object System.Net.Http.HttpClient + } + # Default timeout for HttpClient is 100s. For a 50 MB download this assumes 500 KB/s average, any less will time out + # 20 minutes allows it to work over much slower connections. + $HttpClient.Timeout = New-TimeSpan -Minutes 20 + $Response = $HttpClient.GetAsync("${Uri}${FeedCredential}").Result + if (($Response -eq $null) -or (-not ($Response.IsSuccessStatusCode))) { + # The feed credential is potentially sensitive info. Do not log FeedCredential to console output. + $ErrorMsg = "Failed to download $Uri." 
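Everything in this GetHTTPResponse block executes inside the Invoke-With-Retry helper defined earlier in the script, which makes up to three attempts with a short pause between them. A rough bash rendering of that retry loop, offered purely as an illustration:

    #!/usr/bin/env bash
    # Hedged sketch of the Invoke-With-Retry pattern used around GetHTTPResponse:
    # retry a command a fixed number of times with a pause between attempts.
    set -euo pipefail

    retry() {
        local max_attempts=3 delay=1 attempt=1
        until "$@"; do
            if [ "$attempt" -ge "$max_attempts" ]; then
                echo "command failed after $attempt attempts: $*" >&2
                return 1
            fi
            attempt=$((attempt + 1))
            sleep "$delay"
        done
    }

    # Example: download with up to three attempts (the URL is a placeholder).
    retry curl -fsSL -o dotnet.zip 'https://builds.example.invalid/dotnet.zip'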
+ if ($Response -ne $null) { + $ErrorMsg += " $Response" + } + + throw $ErrorMsg + } + + return $Response + } + finally { + if ($HttpClient -ne $null) { + $HttpClient.Dispose() + } + } + }) +} + +function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel, [bool]$Coherent) { + Say-Invocation $MyInvocation + + $VersionFileUrl = $null + if ($Runtime -eq "dotnet") { + $VersionFileUrl = "$UncachedFeed/Runtime/$Channel/latest.version" + } + elseif ($Runtime -eq "aspnetcore") { + $VersionFileUrl = "$UncachedFeed/aspnetcore/Runtime/$Channel/latest.version" + } + # Currently, the WindowsDesktop runtime is manufactured with the .Net core runtime + elseif ($Runtime -eq "windowsdesktop") { + $VersionFileUrl = "$UncachedFeed/Runtime/$Channel/latest.version" + } + elseif (-not $Runtime) { + if ($Coherent) { + $VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.coherent.version" + } + else { + $VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.version" + } + } + else { + throw "Invalid value for `$Runtime" + } + try { + $Response = GetHTTPResponse -Uri $VersionFileUrl + } + catch { + throw "Could not resolve version information." + } + $StringContent = $Response.Content.ReadAsStringAsync().Result + + switch ($Response.Content.Headers.ContentType) { + { ($_ -eq "application/octet-stream") } { $VersionText = $StringContent } + { ($_ -eq "text/plain") } { $VersionText = $StringContent } + { ($_ -eq "text/plain; charset=UTF-8") } { $VersionText = $StringContent } + default { throw "``$Response.Content.Headers.ContentType`` is an unknown .version file content type." } + } + + $VersionInfo = Get-Version-Info-From-Version-Text $VersionText + + return $VersionInfo +} + +function Parse-Jsonfile-For-Version([string]$JSonFile) { + Say-Invocation $MyInvocation + + If (-Not (Test-Path $JSonFile)) { + throw "Unable to find '$JSonFile'" + } + try { + $JSonContent = Get-Content($JSonFile) -Raw | ConvertFrom-Json | Select-Object -expand "sdk" -ErrorAction SilentlyContinue + } + catch { + throw "Json file unreadable: '$JSonFile'" + } + if ($JSonContent) { + try { + $JSonContent.PSObject.Properties | ForEach-Object { + $PropertyName = $_.Name + if ($PropertyName -eq "version") { + $Version = $_.Value + Say-Verbose "Version = $Version" + } + } + } + catch { + throw "Unable to parse the SDK node in '$JSonFile'" + } + } + else { + throw "Unable to find the SDK node in '$JSonFile'" + } + If ($Version -eq $null) { + throw "Unable to find the SDK:version node in '$JSonFile'" + } + return $Version +} + +function Get-Specific-Version-From-Version([string]$AzureFeed, [string]$Channel, [string]$Version, [string]$JSonFile) { + Say-Invocation $MyInvocation + + if (-not $JSonFile) { + switch ($Version.ToLower()) { + { $_ -eq "latest" } { + $LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $False + return $LatestVersionInfo.Version + } + { $_ -eq "coherent" } { + $LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $True + return $LatestVersionInfo.Version + } + default { return $Version } + } + } + else { + return Parse-Jsonfile-For-Version $JSonFile + } +} + +function Get-Download-Link([string]$AzureFeed, [string]$SpecificVersion, [string]$CLIArchitecture) { + Say-Invocation $MyInvocation + + # If anything fails in this lookup it will default to $SpecificVersion + $SpecificProductVersion = Get-Product-Version -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion + + if ($Runtime -eq "dotnet") { + $PayloadURL = 
"$AzureFeed/Runtime/$SpecificVersion/dotnet-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip" + } + elseif ($Runtime -eq "aspnetcore") { + $PayloadURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/aspnetcore-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip" + } + elseif ($Runtime -eq "windowsdesktop") { + $PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/windowsdesktop-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip" + } + elseif (-not $Runtime) { + $PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-sdk-$SpecificProductVersion-win-$CLIArchitecture.zip" + } + else { + throw "Invalid value for `$Runtime" + } + + Say-Verbose "Constructed primary named payload URL: $PayloadURL" + + return $PayloadURL, $SpecificProductVersion +} + +function Get-LegacyDownload-Link([string]$AzureFeed, [string]$SpecificVersion, [string]$CLIArchitecture) { + Say-Invocation $MyInvocation + + if (-not $Runtime) { + $PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-dev-win-$CLIArchitecture.$SpecificVersion.zip" + } + elseif ($Runtime -eq "dotnet") { + $PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/dotnet-win-$CLIArchitecture.$SpecificVersion.zip" + } + else { + return $null + } + + Say-Verbose "Constructed legacy named payload URL: $PayloadURL" + + return $PayloadURL +} + +function Get-Product-Version([string]$AzureFeed, [string]$SpecificVersion) { + Say-Invocation $MyInvocation + + if ($Runtime -eq "dotnet") { + $ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt" + } + elseif ($Runtime -eq "aspnetcore") { + $ProductVersionTxtURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/productVersion.txt" + } + elseif ($Runtime -eq "windowsdesktop") { + $ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt" + } + elseif (-not $Runtime) { + $ProductVersionTxtURL = "$AzureFeed/Sdk/$SpecificVersion/productVersion.txt" + } + else { + throw "Invalid value specified for `$Runtime" + } + + Say-Verbose "Checking for existence of $ProductVersionTxtURL" + + try { + $productVersionResponse = GetHTTPResponse($productVersionTxtUrl) + + if ($productVersionResponse.StatusCode -eq 200) { + $productVersion = $productVersionResponse.Content.ReadAsStringAsync().Result.Trim() + if ($productVersion -ne $SpecificVersion) + { + Say "Using alternate version $productVersion found in $ProductVersionTxtURL" + } + + return $productVersion + } + else { + Say-Verbose "Got StatusCode $($productVersionResponse.StatusCode) trying to get productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion" + $productVersion = $SpecificVersion + } + } catch { + Say-Verbose "Could not read productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion" + $productVersion = $SpecificVersion + } + + return $productVersion +} + +function Get-User-Share-Path() { + Say-Invocation $MyInvocation + + $InstallRoot = $env:DOTNET_INSTALL_DIR + if (!$InstallRoot) { + $InstallRoot = "$env:LocalAppData\Microsoft\dotnet" + } + return $InstallRoot +} + +function Resolve-Installation-Path([string]$InstallDir) { + Say-Invocation $MyInvocation + + if ($InstallDir -eq "") { + return Get-User-Share-Path + } + return $InstallDir +} + +function Is-Dotnet-Package-Installed([string]$InstallRoot, [string]$RelativePathToPackage, [string]$SpecificVersion) { + Say-Invocation $MyInvocation + + $DotnetPackagePath = Join-Path -Path $InstallRoot -ChildPath $RelativePathToPackage | Join-Path -ChildPath $SpecificVersion + Say-Verbose 
"Is-Dotnet-Package-Installed: DotnetPackagePath=$DotnetPackagePath" + return Test-Path $DotnetPackagePath -PathType Container +} + +function Get-Absolute-Path([string]$RelativeOrAbsolutePath) { + # Too much spam + # Say-Invocation $MyInvocation + + return $ExecutionContext.SessionState.Path.GetUnresolvedProviderPathFromPSPath($RelativeOrAbsolutePath) +} + +function Get-Path-Prefix-With-Version($path) { + $match = [regex]::match($path, $VersionRegEx) + if ($match.Success) { + return $entry.FullName.Substring(0, $match.Index + $match.Length) + } + + return $null +} + +function Get-List-Of-Directories-And-Versions-To-Unpack-From-Dotnet-Package([System.IO.Compression.ZipArchive]$Zip, [string]$OutPath) { + Say-Invocation $MyInvocation + + $ret = @() + foreach ($entry in $Zip.Entries) { + $dir = Get-Path-Prefix-With-Version $entry.FullName + if ($dir -ne $null) { + $path = Get-Absolute-Path $(Join-Path -Path $OutPath -ChildPath $dir) + if (-Not (Test-Path $path -PathType Container)) { + $ret += $dir + } + } + } + + $ret = $ret | Sort-Object | Get-Unique + + $values = ($ret | foreach { "$_" }) -join ";" + Say-Verbose "Directories to unpack: $values" + + return $ret +} + +# Example zip content and extraction algorithm: +# Rule: files if extracted are always being extracted to the same relative path locally +# .\ +# a.exe # file does not exist locally, extract +# b.dll # file exists locally, override only if $OverrideFiles set +# aaa\ # same rules as for files +# ... +# abc\1.0.0\ # directory contains version and exists locally +# ... # do not extract content under versioned part +# abc\asd\ # same rules as for files +# ... +# def\ghi\1.0.1\ # directory contains version and does not exist locally +# ... # extract content +function Extract-Dotnet-Package([string]$ZipPath, [string]$OutPath) { + Say-Invocation $MyInvocation + + Load-Assembly -Assembly System.IO.Compression.FileSystem + Set-Variable -Name Zip + try { + $Zip = [System.IO.Compression.ZipFile]::OpenRead($ZipPath) + + $DirectoriesToUnpack = Get-List-Of-Directories-And-Versions-To-Unpack-From-Dotnet-Package -Zip $Zip -OutPath $OutPath + + foreach ($entry in $Zip.Entries) { + $PathWithVersion = Get-Path-Prefix-With-Version $entry.FullName + if (($PathWithVersion -eq $null) -Or ($DirectoriesToUnpack -contains $PathWithVersion)) { + $DestinationPath = Get-Absolute-Path $(Join-Path -Path $OutPath -ChildPath $entry.FullName) + $DestinationDir = Split-Path -Parent $DestinationPath + $OverrideFiles=$OverrideNonVersionedFiles -Or (-Not (Test-Path $DestinationPath)) + if ((-Not $DestinationPath.EndsWith("\")) -And $OverrideFiles) { + New-Item -ItemType Directory -Force -Path $DestinationDir | Out-Null + [System.IO.Compression.ZipFileExtensions]::ExtractToFile($entry, $DestinationPath, $OverrideNonVersionedFiles) + } + } + } + } + finally { + if ($Zip -ne $null) { + $Zip.Dispose() + } + } +} + +function DownloadFile($Source, [string]$OutPath) { + if ($Source -notlike "http*") { + # Using System.IO.Path.GetFullPath to get the current directory + # does not work in this context - $pwd gives the current directory + if (![System.IO.Path]::IsPathRooted($Source)) { + $Source = $(Join-Path -Path $pwd -ChildPath $Source) + } + $Source = Get-Absolute-Path $Source + Say "Copying file from $Source to $OutPath" + Copy-Item $Source $OutPath + return + } + + $Stream = $null + + try { + $Response = GetHTTPResponse -Uri $Source + $Stream = $Response.Content.ReadAsStreamAsync().Result + $File = [System.IO.File]::Create($OutPath) + $Stream.CopyTo($File) + 
$File.Close() + } + finally { + if ($Stream -ne $null) { + $Stream.Dispose() + } + } +} + +function Prepend-Sdk-InstallRoot-To-Path([string]$InstallRoot, [string]$BinFolderRelativePath) { + $BinPath = Get-Absolute-Path $(Join-Path -Path $InstallRoot -ChildPath $BinFolderRelativePath) + if (-Not $NoPath) { + $SuffixedBinPath = "$BinPath;" + if (-Not $env:path.Contains($SuffixedBinPath)) { + Say "Adding to current process PATH: `"$BinPath`". Note: This change will not be visible if PowerShell was run as a child process." + $env:path = $SuffixedBinPath + $env:path + } else { + Say-Verbose "Current process PATH already contains `"$BinPath`"" + } + } + else { + Say "Binaries of dotnet can be found in $BinPath" + } +} + +$CLIArchitecture = Get-CLIArchitecture-From-Architecture $Architecture +$SpecificVersion = Get-Specific-Version-From-Version -AzureFeed $AzureFeed -Channel $Channel -Version $Version -JSonFile $JSonFile +$DownloadLink, $EffectiveVersion = Get-Download-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture +$LegacyDownloadLink = Get-LegacyDownload-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture + +$InstallRoot = Resolve-Installation-Path $InstallDir +Say-Verbose "InstallRoot: $InstallRoot" +$ScriptName = $MyInvocation.MyCommand.Name + +if ($DryRun) { + Say "Payload URLs:" + Say "Primary named payload URL: $DownloadLink" + if ($LegacyDownloadLink) { + Say "Legacy named payload URL: $LegacyDownloadLink" + } + $RepeatableCommand = ".\$ScriptName -Version `"$SpecificVersion`" -InstallDir `"$InstallRoot`" -Architecture `"$CLIArchitecture`"" + if ($Runtime -eq "dotnet") { + $RepeatableCommand+=" -Runtime `"dotnet`"" + } + elseif ($Runtime -eq "aspnetcore") { + $RepeatableCommand+=" -Runtime `"aspnetcore`"" + } + foreach ($key in $MyInvocation.BoundParameters.Keys) { + if (-not (@("Architecture","Channel","DryRun","InstallDir","Runtime","SharedRuntime","Version") -contains $key)) { + $RepeatableCommand+=" -$key `"$($MyInvocation.BoundParameters[$key])`"" + } + } + Say "Repeatable invocation: $RepeatableCommand" + exit 0 +} + +if ($Runtime -eq "dotnet") { + $assetName = ".NET Core Runtime" + $dotnetPackageRelativePath = "shared\Microsoft.NETCore.App" +} +elseif ($Runtime -eq "aspnetcore") { + $assetName = "ASP.NET Core Runtime" + $dotnetPackageRelativePath = "shared\Microsoft.AspNetCore.App" +} +elseif ($Runtime -eq "windowsdesktop") { + $assetName = ".NET Core Windows Desktop Runtime" + $dotnetPackageRelativePath = "shared\Microsoft.WindowsDesktop.App" +} +elseif (-not $Runtime) { + $assetName = ".NET Core SDK" + $dotnetPackageRelativePath = "sdk" +} +else { + throw "Invalid value for `$Runtime" +} + +if ($SpecificVersion -ne $EffectiveVersion) +{ + Say "Performing installation checks for effective version: $EffectiveVersion" + $SpecificVersion = $EffectiveVersion +} + +# Check if the SDK version is already installed. +$isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion +if ($isAssetInstalled) { + Say "$assetName version $SpecificVersion is already installed." 
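Extract-Dotnet-Package, together with the extraction-algorithm comment earlier in this script, always overwrites non-versioned files but leaves already-installed versioned directories untouched. A hedged bash sketch of that decision rule follows; the install root and sample entries are illustrative, and the regex mirrors the $VersionRegEx defined above.

    #!/usr/bin/env bash
    # Hedged sketch of the versioned-directory rule: entries under an already-installed
    # version directory are skipped, non-versioned files stay eligible for extraction.
    set -euo pipefail

    install_root="$HOME/.dotnet"          # example install root
    version_re='/[0-9]+\.[0-9]+[^/]+/'    # mirrors $VersionRegEx in the script above

    should_extract() {
        local entry="$1" prefix
        # Path prefix up to a version-like segment, if the entry has one.
        prefix=$(grep -oE ".*${version_re}" <<< "$entry" | head -n 1 || true)
        if [ -z "$prefix" ]; then
            return 0                      # non-versioned file: always extract
        fi
        [ ! -d "$install_root/$prefix" ]  # skip when that version is already installed
    }

    for entry in 'sdk/6.0.100/dotnet.dll' 'dotnet'; do
        if should_extract "$entry"; then
            echo "extract: $entry"
        else
            echo "skip (already installed): $entry"
        fi
    done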
+ Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath + exit 0 +} + +New-Item -ItemType Directory -Force -Path $InstallRoot | Out-Null + +$installDrive = $((Get-Item $InstallRoot).PSDrive.Name); +$diskInfo = Get-PSDrive -Name $installDrive +if ($diskInfo.Free / 1MB -le 100) { + Say "There is not enough disk space on drive ${installDrive}:" + exit 0 +} + +$ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName()) +Say-Verbose "Zip path: $ZipPath" + +$DownloadFailed = $false +Say "Downloading link: $DownloadLink" +try { + DownloadFile -Source $DownloadLink -OutPath $ZipPath +} +catch { + Say "Cannot download: $DownloadLink" + if ($LegacyDownloadLink) { + $DownloadLink = $LegacyDownloadLink + $ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName()) + Say-Verbose "Legacy zip path: $ZipPath" + Say "Downloading legacy link: $DownloadLink" + try { + DownloadFile -Source $DownloadLink -OutPath $ZipPath + } + catch { + Say "Cannot download: $DownloadLink" + $DownloadFailed = $true + } + } + else { + $DownloadFailed = $true + } +} + +if ($DownloadFailed) { + throw "Could not find/download: `"$assetName`" with version = $SpecificVersion`nRefer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support" +} + +Say "Extracting zip from $DownloadLink" +Extract-Dotnet-Package -ZipPath $ZipPath -OutPath $InstallRoot + +# Check if the SDK version is installed; if not, fail the installation. +$isAssetInstalled = $false + +# if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed. +if ($SpecificVersion -Match "rtm" -or $SpecificVersion -Match "servicing") { + $ReleaseVersion = $SpecificVersion.Split("-")[0] + Say-Verbose "Checking installation: version = $ReleaseVersion" + $isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $ReleaseVersion +} + +# Check if the SDK version is installed. +if (!$isAssetInstalled) { + Say-Verbose "Checking installation: version = $SpecificVersion" + $isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion +} + +if (!$isAssetInstalled) { + throw "`"$assetName`" with version = $SpecificVersion failed to install with an unknown error." +} + +Remove-Item $ZipPath + +Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath + +Say "Installation finished" +exit 0 \ No newline at end of file diff --git a/eng/common/dotnet-install-scripts/dotnet-install.sh b/eng/common/dotnet-install-scripts/dotnet-install.sh new file mode 100644 index 0000000..9216114 --- /dev/null +++ b/eng/common/dotnet-install-scripts/dotnet-install.sh @@ -0,0 +1,1133 @@ +#!/usr/bin/env bash +# Copyright (c) .NET Foundation and contributors. All rights reserved. +# Licensed under the MIT license. See LICENSE file in the project root for full license information. 
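The bash installer that starts here exposes the same channel, version, and install-directory concepts as the PowerShell script above, surfaced as long options. A hedged usage sketch, with example values; the script's own help output remains the authoritative reference:

    #!/usr/bin/env bash
    # Hedged usage sketch; channel/version/directory values are examples.
    set -euo pipefail

    # Install the latest SDK from the LTS channel into a user-local directory
    # without modifying PATH.
    ./eng/common/dotnet-install-scripts/dotnet-install.sh \
        --channel LTS \
        --version latest \
        --install-dir "$HOME/.dotnet" \
        --no-path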
+# + +# Stop script on NZEC +set -e +# Stop script if unbound variable found (use ${var:-} if intentional) +set -u +# By default cmd1 | cmd2 returns exit code of cmd2 regardless of cmd1 success +# This is causing it to fail +set -o pipefail + +# Use in the the functions: eval $invocation +invocation='say_verbose "Calling: ${yellow:-}${FUNCNAME[0]} ${green:-}$*${normal:-}"' + +# standard output may be used as a return value in the functions +# we need a way to write text on the screen in the functions so that +# it won't interfere with the return value. +# Exposing stream 3 as a pipe to standard output of the script itself +exec 3>&1 + +# Setup some colors to use. These need to work in fairly limited shells, like the Ubuntu Docker container where there are only 8 colors. +# See if stdout is a terminal +if [ -t 1 ] && command -v tput > /dev/null; then + # see if it supports colors + ncolors=$(tput colors) + if [ -n "$ncolors" ] && [ $ncolors -ge 8 ]; then + bold="$(tput bold || echo)" + normal="$(tput sgr0 || echo)" + black="$(tput setaf 0 || echo)" + red="$(tput setaf 1 || echo)" + green="$(tput setaf 2 || echo)" + yellow="$(tput setaf 3 || echo)" + blue="$(tput setaf 4 || echo)" + magenta="$(tput setaf 5 || echo)" + cyan="$(tput setaf 6 || echo)" + white="$(tput setaf 7 || echo)" + fi +fi + +say_warning() { + printf "%b\n" "${yellow:-}dotnet_install: Warning: $1${normal:-}" +} + +say_err() { + printf "%b\n" "${red:-}dotnet_install: Error: $1${normal:-}" >&2 +} + +say() { + # using stream 3 (defined in the beginning) to not interfere with stdout of functions + # which may be used as return value + printf "%b\n" "${cyan:-}dotnet-install:${normal:-} $1" >&3 +} + +say_verbose() { + if [ "$verbose" = true ]; then + say "$1" + fi +} + +# This platform list is finite - if the SDK/Runtime has supported Linux distribution-specific assets, +# then and only then should the Linux distribution appear in this list. +# Adding a Linux distribution to this list does not imply distribution-specific support. +get_legacy_os_name_from_platform() { + eval $invocation + + platform="$1" + case "$platform" in + "centos.7") + echo "centos" + return 0 + ;; + "debian.8") + echo "debian" + return 0 + ;; + "debian.9") + echo "debian.9" + return 0 + ;; + "fedora.23") + echo "fedora.23" + return 0 + ;; + "fedora.24") + echo "fedora.24" + return 0 + ;; + "fedora.27") + echo "fedora.27" + return 0 + ;; + "fedora.28") + echo "fedora.28" + return 0 + ;; + "opensuse.13.2") + echo "opensuse.13.2" + return 0 + ;; + "opensuse.42.1") + echo "opensuse.42.1" + return 0 + ;; + "opensuse.42.3") + echo "opensuse.42.3" + return 0 + ;; + "rhel.7"*) + echo "rhel" + return 0 + ;; + "ubuntu.14.04") + echo "ubuntu" + return 0 + ;; + "ubuntu.16.04") + echo "ubuntu.16.04" + return 0 + ;; + "ubuntu.16.10") + echo "ubuntu.16.10" + return 0 + ;; + "ubuntu.18.04") + echo "ubuntu.18.04" + return 0 + ;; + "alpine.3.4.3") + echo "alpine" + return 0 + ;; + esac + return 1 +} + +get_linux_platform_name() { + eval $invocation + + if [ -n "$runtime_id" ]; then + echo "${runtime_id%-*}" + return 0 + else + if [ -e /etc/os-release ]; then + . 
/etc/os-release + echo "$ID${VERSION_ID:+.${VERSION_ID}}" + return 0 + elif [ -e /etc/redhat-release ]; then + local redhatRelease=$(&1 || true) | grep -q musl +} + +get_current_os_name() { + eval $invocation + + local uname=$(uname) + if [ "$uname" = "Darwin" ]; then + echo "osx" + return 0 + elif [ "$uname" = "FreeBSD" ]; then + echo "freebsd" + return 0 + elif [ "$uname" = "Linux" ]; then + local linux_platform_name + linux_platform_name="$(get_linux_platform_name)" || { echo "linux" && return 0 ; } + + if [ "$linux_platform_name" = "rhel.6" ]; then + echo $linux_platform_name + return 0 + elif is_musl_based_distro; then + echo "linux-musl" + return 0 + else + echo "linux" + return 0 + fi + fi + + say_err "OS name could not be detected: UName = $uname" + return 1 +} + +get_legacy_os_name() { + eval $invocation + + local uname=$(uname) + if [ "$uname" = "Darwin" ]; then + echo "osx" + return 0 + elif [ -n "$runtime_id" ]; then + echo $(get_legacy_os_name_from_platform "${runtime_id%-*}" || echo "${runtime_id%-*}") + return 0 + else + if [ -e /etc/os-release ]; then + . /etc/os-release + os=$(get_legacy_os_name_from_platform "$ID${VERSION_ID:+.${VERSION_ID}}" || echo "") + if [ -n "$os" ]; then + echo "$os" + return 0 + fi + fi + fi + + say_verbose "Distribution specific OS name and version could not be detected: UName = $uname" + return 1 +} + +machine_has() { + eval $invocation + + hash "$1" > /dev/null 2>&1 + return $? +} + + +check_min_reqs() { + local hasMinimum=false + if machine_has "curl"; then + hasMinimum=true + elif machine_has "wget"; then + hasMinimum=true + fi + + if [ "$hasMinimum" = "false" ]; then + say_err "curl (recommended) or wget are required to download dotnet. Install missing prerequisite to proceed." + return 1 + fi + return 0 +} + +check_pre_reqs() { + eval $invocation + + if [ "${DOTNET_INSTALL_SKIP_PREREQS:-}" = "1" ]; then + return 0 + fi + + if [ "$(uname)" = "Linux" ]; then + if is_musl_based_distro; then + if ! command -v scanelf > /dev/null; then + say_warning "scanelf not found, please install pax-utils package." + return 0 + fi + LDCONFIG_COMMAND="scanelf --ldpath -BF '%f'" + [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libintl)" ] && say_warning "Unable to locate libintl. Probable prerequisite missing; install libintl (or gettext)." + else + if [ ! -x "$(command -v ldconfig)" ]; then + say_verbose "ldconfig is not in PATH, trying /sbin/ldconfig." + LDCONFIG_COMMAND="/sbin/ldconfig" + else + LDCONFIG_COMMAND="ldconfig" + fi + local librarypath=${LD_LIBRARY_PATH:-} + LDCONFIG_COMMAND="$LDCONFIG_COMMAND -NXv ${librarypath//:/ }" + fi + + [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep zlib)" ] && say_warning "Unable to locate zlib. Probable prerequisite missing; install zlib." + [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep ssl)" ] && say_warning "Unable to locate libssl. Probable prerequisite missing; install libssl." + [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libicu)" ] && say_warning "Unable to locate libicu. Probable prerequisite missing; install libicu." + [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep lttng)" ] && say_warning "Unable to locate liblttng. Probable prerequisite missing; install libcurl." + [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libcurl)" ] && say_warning "Unable to locate libcurl. Probable prerequisite missing; install libcurl." 
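+        # Illustrative expansion of the probes above (a sketch only; the library path and
+        # output are hypothetical and depend on the machine):
+        #   LD_LIBRARY_PATH=/opt/lib   ->   LDCONFIG_COMMAND="ldconfig -NXv /opt/lib"
+        #   ldconfig -NXv /opt/lib 2>/dev/null | grep zlib    # non-empty output => zlib was located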
+ fi + + return 0 +} + +# args: +# input - $1 +to_lowercase() { + #eval $invocation + + echo "$1" | tr '[:upper:]' '[:lower:]' + return 0 +} + +# args: +# input - $1 +remove_trailing_slash() { + #eval $invocation + + local input="${1:-}" + echo "${input%/}" + return 0 +} + +# args: +# input - $1 +remove_beginning_slash() { + #eval $invocation + + local input="${1:-}" + echo "${input#/}" + return 0 +} + +# args: +# root_path - $1 +# child_path - $2 - this parameter can be empty +combine_paths() { + eval $invocation + + # TODO: Consider making it work with any number of paths. For now: + if [ ! -z "${3:-}" ]; then + say_err "combine_paths: Function takes two parameters." + return 1 + fi + + local root_path="$(remove_trailing_slash "$1")" + local child_path="$(remove_beginning_slash "${2:-}")" + say_verbose "combine_paths: root_path=$root_path" + say_verbose "combine_paths: child_path=$child_path" + echo "$root_path/$child_path" + return 0 +} + +get_machine_architecture() { + eval $invocation + + if command -v uname > /dev/null; then + CPUName=$(uname -m) + case $CPUName in + armv7l) + echo "arm" + return 0 + ;; + aarch64) + echo "arm64" + return 0 + ;; + esac + fi + + # Always default to 'x64' + echo "x64" + return 0 +} + +# args: +# architecture - $1 +get_normalized_architecture_from_architecture() { + eval $invocation + + local architecture="$(to_lowercase "$1")" + case "$architecture" in + \) + echo "$(get_normalized_architecture_from_architecture "$(get_machine_architecture)")" + return 0 + ;; + amd64|x64) + echo "x64" + return 0 + ;; + arm) + echo "arm" + return 0 + ;; + arm64) + echo "arm64" + return 0 + ;; + esac + + say_err "Architecture \`$architecture\` not supported. If you think this is a bug, report it at https://github.com/dotnet/sdk/issues" + return 1 +} + +# The version text returned from the feeds is a 1-line or 2-line string: +# For the SDK and the dotnet runtime (2 lines): +# Line 1: # commit_hash +# Line 2: # 4-part version +# For the aspnetcore runtime (1 line): +# Line 1: # 4-part version + +# args: +# version_text - stdin +get_version_from_version_info() { + eval $invocation + + cat | tail -n 1 | sed 's/\r$//' + return 0 +} + +# args: +# install_root - $1 +# relative_path_to_package - $2 +# specific_version - $3 +is_dotnet_package_installed() { + eval $invocation + + local install_root="$1" + local relative_path_to_package="$2" + local specific_version="${3//[$'\t\r\n']}" + + local dotnet_package_path="$(combine_paths "$(combine_paths "$install_root" "$relative_path_to_package")" "$specific_version")" + say_verbose "is_dotnet_package_installed: dotnet_package_path=$dotnet_package_path" + + if [ -d "$dotnet_package_path" ]; then + return 0 + else + return 1 + fi +} + +# args: +# azure_feed - $1 +# channel - $2 +# normalized_architecture - $3 +# coherent - $4 +get_latest_version_info() { + eval $invocation + + local azure_feed="$1" + local channel="$2" + local normalized_architecture="$3" + local coherent="$4" + + local version_file_url=null + if [[ "$runtime" == "dotnet" ]]; then + version_file_url="$uncached_feed/Runtime/$channel/latest.version" + elif [[ "$runtime" == "aspnetcore" ]]; then + version_file_url="$uncached_feed/aspnetcore/Runtime/$channel/latest.version" + elif [ -z "$runtime" ]; then + if [ "$coherent" = true ]; then + version_file_url="$uncached_feed/Sdk/$channel/latest.coherent.version" + else + version_file_url="$uncached_feed/Sdk/$channel/latest.version" + fi + else + say_err "Invalid value for \$runtime" + return 1 + fi + say_verbose 
"get_latest_version_info: latest url: $version_file_url" + + download "$version_file_url" + return $? +} + +# args: +# json_file - $1 +parse_jsonfile_for_version() { + eval $invocation + + local json_file="$1" + if [ ! -f "$json_file" ]; then + say_err "Unable to find \`$json_file\`" + return 1 + fi + + sdk_section=$(cat $json_file | awk '/"sdk"/,/}/') + if [ -z "$sdk_section" ]; then + say_err "Unable to parse the SDK node in \`$json_file\`" + return 1 + fi + + sdk_list=$(echo $sdk_section | awk -F"[{}]" '{print $2}') + sdk_list=${sdk_list//[\" ]/} + sdk_list=${sdk_list//,/$'\n'} + sdk_list="$(echo -e "${sdk_list}" | tr -d '[[:space:]]')" + + local version_info="" + while read -r line; do + IFS=: + while read -r key value; do + if [[ "$key" == "version" ]]; then + version_info=$value + fi + done <<< "$line" + done <<< "$sdk_list" + if [ -z "$version_info" ]; then + say_err "Unable to find the SDK:version node in \`$json_file\`" + return 1 + fi + + unset IFS; + echo "$version_info" + return 0 +} + +# args: +# azure_feed - $1 +# channel - $2 +# normalized_architecture - $3 +# version - $4 +# json_file - $5 +get_specific_version_from_version() { + eval $invocation + + local azure_feed="$1" + local channel="$2" + local normalized_architecture="$3" + local version="$(to_lowercase "$4")" + local json_file="$5" + + if [ -z "$json_file" ]; then + case "$version" in + latest) + local version_info + version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" false)" || return 1 + say_verbose "get_specific_version_from_version: version_info=$version_info" + echo "$version_info" | get_version_from_version_info + return 0 + ;; + coherent) + local version_info + version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" true)" || return 1 + say_verbose "get_specific_version_from_version: version_info=$version_info" + echo "$version_info" | get_version_from_version_info + return 0 + ;; + *) + echo "$version" + return 0 + ;; + esac + else + local version_info + version_info="$(parse_jsonfile_for_version "$json_file")" || return 1 + echo "$version_info" + return 0 + fi +} + +# args: +# azure_feed - $1 +# channel - $2 +# normalized_architecture - $3 +# specific_version - $4 +construct_download_link() { + eval $invocation + + local azure_feed="$1" + local channel="$2" + local normalized_architecture="$3" + local specific_version="${4//[$'\t\r\n']}" + local specific_product_version="$(get_specific_product_version "$1" "$4")" + + local osname + osname="$(get_current_os_name)" || return 1 + + local download_link=null + if [[ "$runtime" == "dotnet" ]]; then + download_link="$azure_feed/Runtime/$specific_version/dotnet-runtime-$specific_product_version-$osname-$normalized_architecture.tar.gz" + elif [[ "$runtime" == "aspnetcore" ]]; then + download_link="$azure_feed/aspnetcore/Runtime/$specific_version/aspnetcore-runtime-$specific_product_version-$osname-$normalized_architecture.tar.gz" + elif [ -z "$runtime" ]; then + download_link="$azure_feed/Sdk/$specific_version/dotnet-sdk-$specific_product_version-$osname-$normalized_architecture.tar.gz" + else + return 1 + fi + + echo "$download_link" + return 0 +} + +# args: +# azure_feed - $1 +# specific_version - $2 +get_specific_product_version() { + # If we find a 'productVersion.txt' at the root of any folder, we'll use its contents + # to resolve the version of what's in the folder, superseding the specified version. 
+ eval $invocation + + local azure_feed="$1" + local specific_version="${2//[$'\t\r\n']}" + local specific_product_version=$specific_version + + local download_link=null + if [[ "$runtime" == "dotnet" ]]; then + download_link="$azure_feed/Runtime/$specific_version/productVersion.txt${feed_credential}" + elif [[ "$runtime" == "aspnetcore" ]]; then + download_link="$azure_feed/aspnetcore/Runtime/$specific_version/productVersion.txt${feed_credential}" + elif [ -z "$runtime" ]; then + download_link="$azure_feed/Sdk/$specific_version/productVersion.txt${feed_credential}" + else + return 1 + fi + + specific_product_version=$(curl -s --fail "$download_link") + if [ $? -ne 0 ] + then + specific_product_version=$(wget -qO- "$download_link") + if [ $? -ne 0 ] + then + specific_product_version=$specific_version + fi + fi + specific_product_version="${specific_product_version//[$'\t\r\n']}" + + echo "$specific_product_version" + return 0 +} + +# args: +# azure_feed - $1 +# channel - $2 +# normalized_architecture - $3 +# specific_version - $4 +construct_legacy_download_link() { + eval $invocation + + local azure_feed="$1" + local channel="$2" + local normalized_architecture="$3" + local specific_version="${4//[$'\t\r\n']}" + + local distro_specific_osname + distro_specific_osname="$(get_legacy_os_name)" || return 1 + + local legacy_download_link=null + if [[ "$runtime" == "dotnet" ]]; then + legacy_download_link="$azure_feed/Runtime/$specific_version/dotnet-$distro_specific_osname-$normalized_architecture.$specific_version.tar.gz" + elif [ -z "$runtime" ]; then + legacy_download_link="$azure_feed/Sdk/$specific_version/dotnet-dev-$distro_specific_osname-$normalized_architecture.$specific_version.tar.gz" + else + return 1 + fi + + echo "$legacy_download_link" + return 0 +} + +get_user_install_path() { + eval $invocation + + if [ ! -z "${DOTNET_INSTALL_DIR:-}" ]; then + echo "$DOTNET_INSTALL_DIR" + else + echo "$HOME/.dotnet" + fi + return 0 +} + +# args: +# install_dir - $1 +resolve_installation_path() { + eval $invocation + + local install_dir=$1 + if [ "$install_dir" = "" ]; then + local user_install_path="$(get_user_install_path)" + say_verbose "resolve_installation_path: user_install_path=$user_install_path" + echo "$user_install_path" + return 0 + fi + + echo "$install_dir" + return 0 +} + +# args: +# relative_or_absolute_path - $1 +get_absolute_path() { + eval $invocation + + local relative_or_absolute_path=$1 + echo "$(cd "$(dirname "$1")" && pwd -P)/$(basename "$1")" + return 0 +} + +# args: +# input_files - stdin +# root_path - $1 +# out_path - $2 +# override - $3 +copy_files_or_dirs_from_list() { + eval $invocation + + local root_path="$(remove_trailing_slash "$1")" + local out_path="$(remove_trailing_slash "$2")" + local override="$3" + local osname="$(get_current_os_name)" + local override_switch=$( + if [ "$override" = false ]; then + if [ "$osname" = "linux-musl" ]; then + printf -- "-u"; + else + printf -- "-n"; + fi + fi) + + cat | uniq | while read -r file_path; do + local path="$(remove_beginning_slash "${file_path#$root_path}")" + local target="$out_path/$path" + if [ "$override" = true ] || (! 
([ -d "$target" ] || [ -e "$target" ])); then + mkdir -p "$out_path/$(dirname "$path")" + if [ -d "$target" ]; then + rm -rf "$target" + fi + cp -R $override_switch "$root_path/$path" "$target" + fi + done +} + +# args: +# zip_path - $1 +# out_path - $2 +extract_dotnet_package() { + eval $invocation + + local zip_path="$1" + local out_path="$2" + + local temp_out_path="$(mktemp -d "$temporary_file_template")" + + local failed=false + tar -xzf "$zip_path" -C "$temp_out_path" > /dev/null || failed=true + + local folders_with_version_regex='^.*/[0-9]+\.[0-9]+[^/]+/' + find "$temp_out_path" -type f | grep -Eo "$folders_with_version_regex" | sort | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" false + find "$temp_out_path" -type f | grep -Ev "$folders_with_version_regex" | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" "$override_non_versioned_files" + + rm -rf "$temp_out_path" + + if [ "$failed" = true ]; then + say_err "Extraction failed" + return 1 + fi +} + +# args: +# remote_path - $1 +# [out_path] - $2 - stdout if not provided +download() { + eval $invocation + + local remote_path="$1" + local out_path="${2:-}" + + if [[ "$remote_path" != "http"* ]]; then + cp "$remote_path" "$out_path" + return $? + fi + + local failed=false + if machine_has "curl"; then + downloadcurl "$remote_path" "$out_path" || failed=true + elif machine_has "wget"; then + downloadwget "$remote_path" "$out_path" || failed=true + else + failed=true + fi + if [ "$failed" = true ]; then + say_verbose "Download failed: $remote_path" + return 1 + fi + return 0 +} + +downloadcurl() { + eval $invocation + local remote_path="$1" + local out_path="${2:-}" + + # Append feed_credential as late as possible before calling curl to avoid logging feed_credential + remote_path="${remote_path}${feed_credential}" + + local curl_options="--retry 20 --retry-delay 2 --connect-timeout 15 -sSL -f --create-dirs " + local failed=false + if [ -z "$out_path" ]; then + curl $curl_options "$remote_path" || failed=true + else + curl $curl_options -o "$out_path" "$remote_path" || failed=true + fi + if [ "$failed" = true ]; then + say_verbose "Curl download failed" + return 1 + fi + return 0 +} + +downloadwget() { + eval $invocation + local remote_path="$1" + local out_path="${2:-}" + + # Append feed_credential as late as possible before calling wget to avoid logging feed_credential + remote_path="${remote_path}${feed_credential}" + local wget_options="--tries 20 --waitretry 2 --connect-timeout 15 " + local failed=false + if [ -z "$out_path" ]; then + wget -q $wget_options -O - "$remote_path" || failed=true + else + wget $wget_options -O "$out_path" "$remote_path" || failed=true + fi + if [ "$failed" = true ]; then + say_verbose "Wget download failed" + return 1 + fi + return 0 +} + +calculate_vars() { + eval $invocation + valid_legacy_download_link=true + + normalized_architecture="$(get_normalized_architecture_from_architecture "$architecture")" + say_verbose "normalized_architecture=$normalized_architecture" + + specific_version="$(get_specific_version_from_version "$azure_feed" "$channel" "$normalized_architecture" "$version" "$json_file")" + specific_product_version="$(get_specific_product_version "$azure_feed" "$specific_version")" + say_verbose "specific_version=$specific_version" + if [ -z "$specific_version" ]; then + say_err "Could not resolve version information." 
+ return 1 + fi + + download_link="$(construct_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")" + say_verbose "Constructed primary named payload URL: $download_link" + + legacy_download_link="$(construct_legacy_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")" || valid_legacy_download_link=false + + if [ "$valid_legacy_download_link" = true ]; then + say_verbose "Constructed legacy named payload URL: $legacy_download_link" + else + say_verbose "Cound not construct a legacy_download_link; omitting..." + fi + + install_root="$(resolve_installation_path "$install_dir")" + say_verbose "InstallRoot: $install_root" +} + +install_dotnet() { + eval $invocation + local download_failed=false + local asset_name='' + local asset_relative_path='' + + if [[ "$runtime" == "dotnet" ]]; then + asset_relative_path="shared/Microsoft.NETCore.App" + asset_name=".NET Core Runtime" + elif [[ "$runtime" == "aspnetcore" ]]; then + asset_relative_path="shared/Microsoft.AspNetCore.App" + asset_name="ASP.NET Core Runtime" + elif [ -z "$runtime" ]; then + asset_relative_path="sdk" + asset_name=".NET Core SDK" + else + say_err "Invalid value for \$runtime" + return 1 + fi + + # Check if the SDK version is already installed. + if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$specific_version"; then + say "$asset_name version $specific_version is already installed." + return 0 + fi + + mkdir -p "$install_root" + zip_path="$(mktemp "$temporary_file_template")" + say_verbose "Zip path: $zip_path" + + say "Downloading link: $download_link" + + # Failures are normal in the non-legacy case for ultimately legacy downloads. + # Do not output to stderr, since output to stderr is considered an error. + download "$download_link" "$zip_path" 2>&1 || download_failed=true + + # if the download fails, download the legacy_download_link + if [ "$download_failed" = true ]; then + say "Cannot download: $download_link" + + if [ "$valid_legacy_download_link" = true ]; then + download_failed=false + download_link="$legacy_download_link" + zip_path="$(mktemp "$temporary_file_template")" + say_verbose "Legacy zip path: $zip_path" + say "Downloading legacy link: $download_link" + download "$download_link" "$zip_path" 2>&1 || download_failed=true + + if [ "$download_failed" = true ]; then + say "Cannot download: $download_link" + fi + fi + fi + + if [ "$download_failed" = true ]; then + say_err "Could not find/download: \`$asset_name\` with version = $specific_version" + say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support" + return 1 + fi + + say "Extracting zip from $download_link" + extract_dotnet_package "$zip_path" "$install_root" + + # Check if the SDK version is installed; if not, fail the installation. + # if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed. + if [[ $specific_version == *"rtm"* || $specific_version == *"servicing"* ]]; then + IFS='-' + read -ra verArr <<< "$specific_version" + release_version="${verArr[0]}" + unset IFS; + say_verbose "Checking installation: version = $release_version" + if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$release_version"; then + return 0 + fi + fi + + # Check if the standard SDK version is installed. 
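+    # Illustrative example (hypothetical version): a request for 3.1.402-servicing.20410.9 is first
+    # checked against the on-disk folder "3.1.402" (the release part split off above), and then
+    # against the resolved product version below.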
+ say_verbose "Checking installation: version = $specific_product_version" + if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$specific_product_version"; then + return 0 + fi + + say_err "\`$asset_name\` with version = $specific_product_version failed to install with an unknown error." + return 1 +} + +args=("$@") + +local_version_file_relative_path="/.version" +bin_folder_relative_path="" +temporary_file_template="${TMPDIR:-/tmp}/dotnet.XXXXXXXXX" + +channel="LTS" +version="Latest" +json_file="" +install_dir="" +architecture="" +dry_run=false +no_path=false +no_cdn=false +azure_feed="https://dotnetcli.azureedge.net/dotnet" +uncached_feed="https://dotnetcli.blob.core.windows.net/dotnet" +feed_credential="" +verbose=false +runtime="" +runtime_id="" +override_non_versioned_files=true +non_dynamic_parameters="" + +while [ $# -ne 0 ] +do + name="$1" + case "$name" in + -c|--channel|-[Cc]hannel) + shift + channel="$1" + ;; + -v|--version|-[Vv]ersion) + shift + version="$1" + ;; + -i|--install-dir|-[Ii]nstall[Dd]ir) + shift + install_dir="$1" + ;; + --arch|--architecture|-[Aa]rch|-[Aa]rchitecture) + shift + architecture="$1" + ;; + --shared-runtime|-[Ss]hared[Rr]untime) + say_warning "The --shared-runtime flag is obsolete and may be removed in a future version of this script. The recommended usage is to specify '--runtime dotnet'." + if [ -z "$runtime" ]; then + runtime="dotnet" + fi + ;; + --runtime|-[Rr]untime) + shift + runtime="$1" + if [[ "$runtime" != "dotnet" ]] && [[ "$runtime" != "aspnetcore" ]]; then + say_err "Unsupported value for --runtime: '$1'. Valid values are 'dotnet' and 'aspnetcore'." + if [[ "$runtime" == "windowsdesktop" ]]; then + say_err "WindowsDesktop archives are manufactured for Windows platforms only." + fi + exit 1 + fi + ;; + --dry-run|-[Dd]ry[Rr]un) + dry_run=true + ;; + --no-path|-[Nn]o[Pp]ath) + no_path=true + non_dynamic_parameters+=" $name" + ;; + --verbose|-[Vv]erbose) + verbose=true + non_dynamic_parameters+=" $name" + ;; + --no-cdn|-[Nn]o[Cc]dn) + no_cdn=true + non_dynamic_parameters+=" $name" + ;; + --azure-feed|-[Aa]zure[Ff]eed) + shift + azure_feed="$1" + non_dynamic_parameters+=" $name "\""$1"\""" + ;; + --uncached-feed|-[Uu]ncached[Ff]eed) + shift + uncached_feed="$1" + non_dynamic_parameters+=" $name "\""$1"\""" + ;; + --feed-credential|-[Ff]eed[Cc]redential) + shift + feed_credential="$1" + non_dynamic_parameters+=" $name "\""$1"\""" + ;; + --runtime-id|-[Rr]untime[Ii]d) + shift + runtime_id="$1" + non_dynamic_parameters+=" $name "\""$1"\""" + ;; + --jsonfile|-[Jj][Ss]on[Ff]ile) + shift + json_file="$1" + ;; + --skip-non-versioned-files|-[Ss]kip[Nn]on[Vv]ersioned[Ff]iles) + override_non_versioned_files=false + non_dynamic_parameters+=" $name" + ;; + -?|--?|-h|--help|-[Hh]elp) + script_name="$(basename "$0")" + echo ".NET Tools Installer" + echo "Usage: $script_name [-c|--channel ] [-v|--version ] [-p|--prefix ]" + echo " $script_name -h|-?|--help" + echo "" + echo "$script_name is a simple command line interface for obtaining dotnet cli." + echo "" + echo "Options:" + echo " -c,--channel Download from the channel specified, Defaults to \`$channel\`." + echo " -Channel" + echo " Possible values:" + echo " - Current - most current release" + echo " - LTS - most current supported release" + echo " - 2-part version in a format A.B - represents a specific release" + echo " examples: 2.0; 1.0" + echo " - Branch name" + echo " examples: release/2.0.0; Master" + echo " Note: The version parameter overrides the channel parameter." 
+ echo " -v,--version Use specific VERSION, Defaults to \`$version\`." + echo " -Version" + echo " Possible values:" + echo " - latest - most latest build on specific channel" + echo " - coherent - most latest coherent build on specific channel" + echo " coherent applies only to SDK downloads" + echo " - 3-part version in a format A.B.C - represents specific version of build" + echo " examples: 2.0.0-preview2-006120; 1.1.0" + echo " -i,--install-dir Install under specified location (see Install Location below)" + echo " -InstallDir" + echo " --architecture Architecture of dotnet binaries to be installed, Defaults to \`$architecture\`." + echo " --arch,-Architecture,-Arch" + echo " Possible values: x64, arm, and arm64" + echo " --runtime Installs a shared runtime only, without the SDK." + echo " -Runtime" + echo " Possible values:" + echo " - dotnet - the Microsoft.NETCore.App shared runtime" + echo " - aspnetcore - the Microsoft.AspNetCore.App shared runtime" + echo " --dry-run,-DryRun Do not perform installation. Display download link." + echo " --no-path, -NoPath Do not set PATH for the current process." + echo " --verbose,-Verbose Display diagnostics information." + echo " --azure-feed,-AzureFeed Azure feed location. Defaults to $azure_feed, This parameter typically is not changed by the user." + echo " --uncached-feed,-UncachedFeed Uncached feed location. This parameter typically is not changed by the user." + echo " --feed-credential,-FeedCredential Azure feed shared access token. This parameter typically is not specified." + echo " --skip-non-versioned-files Skips non-versioned files if they already exist, such as the dotnet executable." + echo " -SkipNonVersionedFiles" + echo " --no-cdn,-NoCdn Disable downloading from the Azure CDN, and use the uncached feed directly." + echo " --jsonfile Determines the SDK version from a user specified global.json file." + echo " Note: global.json must have a value for 'SDK:Version'" + echo " --runtime-id Installs the .NET Tools for the given platform (use linux-x64 for portable linux)." + echo " -RuntimeId" + echo " -?,--?,-h,--help,-Help Shows this help message" + echo "" + echo "Obsolete parameters:" + echo " --shared-runtime The recommended alternative is '--runtime dotnet'." + echo " This parameter is obsolete and may be removed in a future version of this script." + echo " Installs just the shared runtime bits, not the entire SDK." 
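+            # Illustrative --dry-run output (hypothetical version, feed and paths):
+            #   dotnet-install: Payload URLs:
+            #   dotnet-install: Primary named payload URL: https://dotnetcli.azureedge.net/dotnet/Sdk/3.1.402/dotnet-sdk-3.1.402-linux-x64.tar.gz
+            #   dotnet-install: Repeatable invocation: ./dotnet-install.sh --version "3.1.402" --install-dir "/home/user/.dotnet" --architecture "x64"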
+ echo "" + echo "Install Location:" + echo " Location is chosen in following order:" + echo " - --install-dir option" + echo " - Environmental variable DOTNET_INSTALL_DIR" + echo " - $HOME/.dotnet" + exit 0 + ;; + *) + say_err "Unknown argument \`$name\`" + exit 1 + ;; + esac + + shift +done + +if [ "$no_cdn" = true ]; then + azure_feed="$uncached_feed" +fi + +check_min_reqs +calculate_vars +script_name=$(basename "$0") + +if [ "$dry_run" = true ]; then + say "Payload URLs:" + say "Primary named payload URL: $download_link" + if [ "$valid_legacy_download_link" = true ]; then + say "Legacy named payload URL: $legacy_download_link" + fi + repeatable_command="./$script_name --version "\""$specific_version"\"" --install-dir "\""$install_root"\"" --architecture "\""$normalized_architecture"\""" + if [[ "$runtime" == "dotnet" ]]; then + repeatable_command+=" --runtime "\""dotnet"\""" + elif [[ "$runtime" == "aspnetcore" ]]; then + repeatable_command+=" --runtime "\""aspnetcore"\""" + fi + repeatable_command+="$non_dynamic_parameters" + say "Repeatable invocation: $repeatable_command" + exit 0 +fi + +check_pre_reqs +install_dotnet + +bin_path="$(get_absolute_path "$(combine_paths "$install_root" "$bin_folder_relative_path")")" +if [ "$no_path" = false ]; then + say "Adding to current process PATH: \`$bin_path\`. Note: This change will be visible only when sourcing script." + export PATH="$bin_path":"$PATH" +else + say "Binaries of dotnet can be found in $bin_path" +fi + +say "Installation finished successfully." diff --git a/eng/common/dotnet-install.cmd b/eng/common/dotnet-install.cmd new file mode 100644 index 0000000..b1c2642 --- /dev/null +++ b/eng/common/dotnet-install.cmd @@ -0,0 +1,2 @@ +@echo off +powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet-install.ps1""" %*" \ No newline at end of file diff --git a/eng/common/dotnet-install.ps1 b/eng/common/dotnet-install.ps1 new file mode 100644 index 0000000..811f0f7 --- /dev/null +++ b/eng/common/dotnet-install.ps1 @@ -0,0 +1,28 @@ +[CmdletBinding(PositionalBinding=$false)] +Param( + [string] $verbosity = 'minimal', + [string] $architecture = '', + [string] $version = 'Latest', + [string] $runtime = 'dotnet', + [string] $RuntimeSourceFeed = '', + [string] $RuntimeSourceFeedKey = '' +) + +. $PSScriptRoot\tools.ps1 + +$dotnetRoot = Join-Path $RepoRoot '.dotnet' + +$installdir = $dotnetRoot +try { + if ($architecture -and $architecture.Trim() -eq 'x86') { + $installdir = Join-Path $installdir 'x86' + } + InstallDotNet $installdir $version $architecture $runtime $true -RuntimeSourceFeed $RuntimeSourceFeed -RuntimeSourceFeedKey $RuntimeSourceFeedKey +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_ + ExitWithExitCode 1 +} + +ExitWithExitCode 0 diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh new file mode 100644 index 0000000..7e69e3a --- /dev/null +++ b/eng/common/dotnet-install.sh @@ -0,0 +1,91 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" +# resolve $source until the file is no longer a symlink +while [[ -h "$source" ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +. 
"$scriptroot/tools.sh" + +version='Latest' +architecture='' +runtime='dotnet' +runtimeSourceFeed='' +runtimeSourceFeedKey='' +while [[ $# > 0 ]]; do + opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" + case "$opt" in + -version|-v) + shift + version="$1" + ;; + -architecture|-a) + shift + architecture="$1" + ;; + -runtime|-r) + shift + runtime="$1" + ;; + -runtimesourcefeed) + shift + runtimeSourceFeed="$1" + ;; + -runtimesourcefeedkey) + shift + runtimeSourceFeedKey="$1" + ;; + *) + Write-PipelineTelemetryError -Category 'Build' -Message "Invalid argument: $1" + exit 1 + ;; + esac + shift +done + +# Use uname to determine what the CPU is, see https://en.wikipedia.org/wiki/Uname#Examples +cpuname=$(uname -m) +case $cpuname in + arm64|aarch64) + buildarch=arm64 + if [ "$(getconf LONG_BIT)" -lt 64 ]; then + # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS) + buildarch=arm + fi + ;; + loongarch64) + buildarch=loongarch64 + ;; + amd64|x86_64) + buildarch=x64 + ;; + armv*l) + buildarch=arm + ;; + i[3-6]86) + buildarch=x86 + ;; + *) + echo "Unknown CPU $cpuname detected, treating it as x64" + buildarch=x64 + ;; +esac + +dotnetRoot="${repo_root}.dotnet" +if [[ $architecture != "" ]] && [[ $architecture != $buildarch ]]; then + dotnetRoot="$dotnetRoot/$architecture" +fi + +InstallDotNet $dotnetRoot $version "$architecture" $runtime true $runtimeSourceFeed $runtimeSourceFeedKey || { + local exit_code=$? + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "dotnet-install.sh failed (exit code '$exit_code')." >&2 + ExitWithExitCode $exit_code +} + +ExitWithExitCode 0 diff --git a/eng/common/enable-cross-org-publishing.ps1 b/eng/common/enable-cross-org-publishing.ps1 new file mode 100644 index 0000000..da09da4 --- /dev/null +++ b/eng/common/enable-cross-org-publishing.ps1 @@ -0,0 +1,13 @@ +param( + [string] $token +) + + +. $PSScriptRoot\pipeline-logging-functions.ps1 + +# Write-PipelineSetVariable will no-op if a variable named $ci is not defined +# Since this script is only ever called in AzDO builds, just universally set it +$ci = $true + +Write-PipelineSetVariable -Name 'VSS_NUGET_ACCESSTOKEN' -Value $token -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'VSS_NUGET_URI_PREFIXES' -Value 'https://dnceng.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/dnceng/;https://devdiv.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/devdiv/' -IsMultiJobVariable $false diff --git a/eng/common/generate-graph-files.ps1 b/eng/common/generate-graph-files.ps1 new file mode 100644 index 0000000..0728b1a --- /dev/null +++ b/eng/common/generate-graph-files.ps1 @@ -0,0 +1,86 @@ +Param( + [Parameter(Mandatory=$true)][string] $barToken, # Token generated at https://maestro-prod.westus2.cloudapp.azure.com/Account/Tokens + [Parameter(Mandatory=$true)][string] $gitHubPat, # GitHub personal access token from https://github.com/settings/tokens (no auth scopes needed) + [Parameter(Mandatory=$true)][string] $azdoPat, # Azure Dev Ops tokens from https://dev.azure.com/dnceng/_details/security/tokens (code read scope needed) + [Parameter(Mandatory=$true)][string] $outputFolder, # Where the graphviz.txt file will be created + [string] $darcVersion, # darc's version + [string] $graphvizVersion = '2.38', # GraphViz version + [switch] $includeToolset # Whether the graph should include toolset dependencies or not. i.e. arcade, optimization. 
For more about + # toolset dependencies see https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#toolset-vs-product-dependencies +) + +function CheckExitCode ([string]$stage) +{ + $exitCode = $LASTEXITCODE + if ($exitCode -ne 0) { + Write-PipelineTelemetryError -Category 'Arcade' -Message "Something failed in stage: '$stage'. Check for errors above. Exiting now..." + ExitWithExitCode $exitCode + } +} + +try { + $ErrorActionPreference = 'Stop' + . $PSScriptRoot\tools.ps1 + + Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1') + + Push-Location $PSScriptRoot + + Write-Host 'Installing darc...' + . .\darc-init.ps1 -darcVersion $darcVersion + CheckExitCode 'Running darc-init' + + $engCommonBaseDir = Join-Path $PSScriptRoot 'native\' + $graphvizInstallDir = CommonLibrary\Get-NativeInstallDirectory + $nativeToolBaseUri = 'https://netcorenativeassets.blob.core.windows.net/resource-packages/external' + $installBin = Join-Path $graphvizInstallDir 'bin' + + Write-Host 'Installing dot...' + .\native\install-tool.ps1 -ToolName graphviz -InstallPath $installBin -BaseUri $nativeToolBaseUri -CommonLibraryDirectory $engCommonBaseDir -Version $graphvizVersion -Verbose + + $darcExe = "$env:USERPROFILE\.dotnet\tools" + $darcExe = Resolve-Path "$darcExe\darc.exe" + + Create-Directory $outputFolder + + # Generate 3 graph descriptions: + # 1. Flat with coherency information + # 2. Graphviz (dot) file + # 3. Standard dependency graph + $graphVizFilePath = "$outputFolder\graphviz.txt" + $graphVizImageFilePath = "$outputFolder\graph.png" + $normalGraphFilePath = "$outputFolder\graph-full.txt" + $flatGraphFilePath = "$outputFolder\graph-flat.txt" + $baseOptions = @( '--github-pat', "$gitHubPat", '--azdev-pat', "$azdoPat", '--password', "$barToken" ) + + if ($includeToolset) { + Write-Host 'Toolsets will be included in the graph...' + $baseOptions += @( '--include-toolset' ) + } + + Write-Host 'Generating standard dependency graph...' + & "$darcExe" get-dependency-graph @baseOptions --output-file $normalGraphFilePath + CheckExitCode 'Generating normal dependency graph' + + Write-Host 'Generating flat dependency graph and graphviz file...' + & "$darcExe" get-dependency-graph @baseOptions --flat --coherency --graphviz $graphVizFilePath --output-file $flatGraphFilePath + CheckExitCode 'Generating flat and graphviz dependency graph' + + Write-Host "Generating graph image $graphVizFilePath" + $dotFilePath = Join-Path $installBin "graphviz\$graphvizVersion\release\bin\dot.exe" + & "$dotFilePath" -Tpng -o"$graphVizImageFilePath" "$graphVizFilePath" + CheckExitCode 'Generating graphviz image' + + Write-Host "'$graphVizFilePath', '$flatGraphFilePath', '$normalGraphFilePath' and '$graphVizImageFilePath' created!" +} +catch { + if (!$includeToolset) { + Write-Host 'This might be a toolset repo which includes only toolset dependencies. ' -NoNewline -ForegroundColor Yellow + Write-Host 'Since -includeToolset is not set there is no graph to create. Include -includeToolset and try again...' 
-ForegroundColor Yellow + } + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'Arcade' -Message $_ + ExitWithExitCode 1 +} finally { + Pop-Location +} \ No newline at end of file diff --git a/eng/common/generate-locproject.ps1 b/eng/common/generate-locproject.ps1 new file mode 100644 index 0000000..524aaa5 --- /dev/null +++ b/eng/common/generate-locproject.ps1 @@ -0,0 +1,189 @@ +Param( + [Parameter(Mandatory=$true)][string] $SourcesDirectory, # Directory where source files live; if using a Localize directory it should live in here + [string] $LanguageSet = 'VS_Main_Languages', # Language set to be used in the LocProject.json + [switch] $UseCheckedInLocProjectJson, # When set, generates a LocProject.json and compares it to one that already exists in the repo; otherwise just generates one + [switch] $CreateNeutralXlfs # Creates neutral xlf files. Only set to false when running locally +) + +# Generates LocProject.json files for the OneLocBuild task. OneLocBuildTask is described here: +# https://ceapex.visualstudio.com/CEINTL/_wiki/wikis/CEINTL.wiki/107/Localization-with-OneLocBuild-Task + +Set-StrictMode -Version 2.0 +$ErrorActionPreference = "Stop" +. $PSScriptRoot\pipeline-logging-functions.ps1 + +$exclusionsFilePath = "$SourcesDirectory\eng\Localize\LocExclusions.json" +$exclusions = @{ Exclusions = @() } +if (Test-Path -Path $exclusionsFilePath) +{ + $exclusions = Get-Content "$exclusionsFilePath" | ConvertFrom-Json +} + +Push-Location "$SourcesDirectory" # push location for Resolve-Path -Relative to work + +# Template files +$jsonFiles = @() +$jsonTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\.template\.config\\localize\\.+\.en\.json" } # .NET templating pattern +$jsonTemplateFiles | ForEach-Object { + $null = $_.Name -Match "(.+)\.[\w-]+\.json" # matches '[filename].[langcode].json + + $destinationFile = "$($_.Directory.FullName)\$($Matches.1).json" + $jsonFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru +} + +$jsonWinformsTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern + +$wxlFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\.+\.wxl" -And -Not( $_.Directory.Name -Match "\d{4}" ) } # localized files live in four digit lang ID directories; this excludes them +if (-not $wxlFiles) { + $wxlEnFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\1033\\.+\.wxl" } # pick up en files (1033 = en) specifically so we can copy them to use as the neutral xlf files + if ($wxlEnFiles) { + $wxlFiles = @() + $wxlEnFiles | ForEach-Object { + $destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)" + $wxlFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru + } + } +} + +$macosHtmlEnFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\.lproj\\.+\.html$" } # add installer HTML files +$macosHtmlFiles = @() +if ($macosHtmlEnFiles) { + $macosHtmlEnFiles | ForEach-Object { + $destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)" + $macosHtmlFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru + } +} + +$xlfFiles = @() + +$allXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.xlf" +$langXlfFiles = @() +if ($allXlfFiles) { + $null = $allXlfFiles[0].FullName -Match "\.([\w-]+)\.xlf" # matches 
'[langcode].xlf' + $firstLangCode = $Matches.1 + $langXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.$firstLangCode.xlf" +} +$langXlfFiles | ForEach-Object { + $null = $_.Name -Match "(.+)\.[\w-]+\.xlf" # matches '[filename].[langcode].xlf + + $destinationFile = "$($_.Directory.FullName)\$($Matches.1).xlf" + $xlfFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru +} + +$locFiles = $jsonFiles + $jsonWinformsTemplateFiles + $xlfFiles + +$locJson = @{ + Projects = @( + @{ + LanguageSet = $LanguageSet + LocItems = @( + $locFiles | ForEach-Object { + $outputPath = "$(($_.DirectoryName | Resolve-Path -Relative) + "\")" + $continue = $true + foreach ($exclusion in $exclusions.Exclusions) { + if ($_.FullName.Contains($exclusion)) + { + $continue = $false + } + } + $sourceFile = ($_.FullName | Resolve-Path -Relative) + if (!$CreateNeutralXlfs -and $_.Extension -eq '.xlf') { + Remove-Item -Path $sourceFile + } + if ($continue) + { + if ($_.Directory.Name -eq 'en' -and $_.Extension -eq '.json') { + return @{ + SourceFile = $sourceFile + CopyOption = "LangIDOnPath" + OutputPath = "$($_.Directory.Parent.FullName | Resolve-Path -Relative)\" + } + } else { + return @{ + SourceFile = $sourceFile + CopyOption = "LangIDOnName" + OutputPath = $outputPath + } + } + } + } + ) + }, + @{ + LanguageSet = $LanguageSet + CloneLanguageSet = "WiX_CloneLanguages" + LssFiles = @( "wxl_loc.lss" ) + LocItems = @( + $wxlFiles | ForEach-Object { + $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\" + $continue = $true + foreach ($exclusion in $exclusions.Exclusions) { + if ($_.FullName.Contains($exclusion)) { + $continue = $false + } + } + $sourceFile = ($_.FullName | Resolve-Path -Relative) + if ($continue) + { + return @{ + SourceFile = $sourceFile + CopyOption = "LangIDOnPath" + OutputPath = $outputPath + } + } + } + ) + }, + @{ + LanguageSet = $LanguageSet + CloneLanguageSet = "VS_macOS_CloneLanguages" + LssFiles = @( ".\eng\common\loc\P22DotNetHtmlLocalization.lss" ) + LocItems = @( + $macosHtmlFiles | ForEach-Object { + $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\" + $continue = $true + foreach ($exclusion in $exclusions.Exclusions) { + if ($_.FullName.Contains($exclusion)) { + $continue = $false + } + } + $sourceFile = ($_.FullName | Resolve-Path -Relative) + $lciFile = $sourceFile + ".lci" + if ($continue) { + $result = @{ + SourceFile = $sourceFile + CopyOption = "LangIDOnPath" + OutputPath = $outputPath + } + if (Test-Path $lciFile -PathType Leaf) { + $result["LciFile"] = $lciFile + } + return $result + } + } + ) + } + ) +} + +$json = ConvertTo-Json $locJson -Depth 5 +Write-Host "LocProject.json generated:`n`n$json`n`n" +Pop-Location + +if (!$UseCheckedInLocProjectJson) { + New-Item "$SourcesDirectory\eng\Localize\LocProject.json" -Force # Need this to make sure the Localize directory is created + Set-Content "$SourcesDirectory\eng\Localize\LocProject.json" $json +} +else { + New-Item "$SourcesDirectory\eng\Localize\LocProject-generated.json" -Force # Need this to make sure the Localize directory is created + Set-Content "$SourcesDirectory\eng\Localize\LocProject-generated.json" $json + + if ((Get-FileHash "$SourcesDirectory\eng\Localize\LocProject-generated.json").Hash -ne (Get-FileHash "$SourcesDirectory\eng\Localize\LocProject.json").Hash) { + Write-PipelineTelemetryError -Category "OneLocBuild" -Message "Existing LocProject.json differs from generated LocProject.json. Download LocProject-generated.json and compare them." 
+ + exit 1 + } + else { + Write-Host "Generated LocProject.json and current LocProject.json are identical." + } +} diff --git a/eng/common/generate-sbom-prep.ps1 b/eng/common/generate-sbom-prep.ps1 new file mode 100644 index 0000000..3e5c1c7 --- /dev/null +++ b/eng/common/generate-sbom-prep.ps1 @@ -0,0 +1,21 @@ +Param( + [Parameter(Mandatory=$true)][string] $ManifestDirPath # Manifest directory where sbom will be placed +) + +. $PSScriptRoot\pipeline-logging-functions.ps1 + +Write-Host "Creating dir $ManifestDirPath" +# create directory for sbom manifest to be placed +if (!(Test-Path -path $ManifestDirPath)) +{ + New-Item -ItemType Directory -path $ManifestDirPath + Write-Host "Successfully created directory $ManifestDirPath" +} +else{ + Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder." +} + +Write-Host "Updating artifact name" +$artifact_name = "${env:SYSTEM_STAGENAME}_${env:AGENT_JOBNAME}_SBOM" -replace '["/:<>\\|?@*"() ]', '_' +Write-Host "Artifact name $artifact_name" +Write-Host "##vso[task.setvariable variable=ARTIFACT_NAME]$artifact_name" diff --git a/eng/common/generate-sbom-prep.sh b/eng/common/generate-sbom-prep.sh new file mode 100644 index 0000000..d5c76dc --- /dev/null +++ b/eng/common/generate-sbom-prep.sh @@ -0,0 +1,34 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" + +# resolve $SOURCE until the file is no longer a symlink +while [[ -h $source ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" +. $scriptroot/pipeline-logging-functions.sh + +manifest_dir=$1 + +if [ ! -d "$manifest_dir" ] ; then + mkdir -p "$manifest_dir" + echo "Sbom directory created." $manifest_dir +else + Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder." +fi + +artifact_name=$SYSTEM_STAGENAME"_"$AGENT_JOBNAME"_SBOM" +echo "Artifact name before : "$artifact_name +# replace all special characters with _, some builds use special characters like : in Agent.Jobname, that is not a permissible name while uploading artifacts. 
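+# Illustrative example (hypothetical stage/job names):
+#   SYSTEM_STAGENAME="Build", AGENT_JOBNAME="Windows NT x64:Release"
+#   artifact_name      -> "Build_Windows NT x64:Release_SBOM"
+#   safe_artifact_name -> "Build_Windows_NT_x64_Release_SBOM"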
+safe_artifact_name="${artifact_name//["/:<>\\|?@*$" ]/_}" +echo "Artifact name after : "$safe_artifact_name +export ARTIFACT_NAME=$safe_artifact_name +echo "##vso[task.setvariable variable=ARTIFACT_NAME]$safe_artifact_name" + +exit 0 diff --git a/eng/common/helixpublish.proj b/eng/common/helixpublish.proj new file mode 100644 index 0000000..d7f1858 --- /dev/null +++ b/eng/common/helixpublish.proj @@ -0,0 +1,26 @@ + + + + msbuild + + + + + %(Identity) + + + + + + $(WorkItemDirectory) + $(WorkItemCommand) + $(WorkItemTimeout) + + + + + + + + + diff --git a/eng/common/init-tools-native.cmd b/eng/common/init-tools-native.cmd new file mode 100644 index 0000000..438cd54 --- /dev/null +++ b/eng/common/init-tools-native.cmd @@ -0,0 +1,3 @@ +@echo off +powershell -NoProfile -NoLogo -ExecutionPolicy ByPass -command "& """%~dp0init-tools-native.ps1""" %*" +exit /b %ErrorLevel% \ No newline at end of file diff --git a/eng/common/init-tools-native.ps1 b/eng/common/init-tools-native.ps1 new file mode 100644 index 0000000..27ccdb9 --- /dev/null +++ b/eng/common/init-tools-native.ps1 @@ -0,0 +1,203 @@ +<# +.SYNOPSIS +Entry point script for installing native tools + +.DESCRIPTION +Reads $RepoRoot\global.json file to determine native assets to install +and executes installers for those tools + +.PARAMETER BaseUri +Base file directory or Url from which to acquire tool archives + +.PARAMETER InstallDirectory +Directory to install native toolset. This is a command-line override for the default +Install directory precedence order: +- InstallDirectory command-line override +- NETCOREENG_INSTALL_DIRECTORY environment variable +- (default) %USERPROFILE%/.netcoreeng/native + +.PARAMETER Clean +Switch specifying to not install anything, but cleanup native asset folders + +.PARAMETER Force +Clean and then install tools + +.PARAMETER DownloadRetries +Total number of retry attempts + +.PARAMETER RetryWaitTimeInSeconds +Wait time between retry attempts in seconds + +.PARAMETER GlobalJsonFile +File path to global.json file + +.PARAMETER PathPromotion +Optional switch to enable either promote native tools specified in the global.json to the path (in Azure Pipelines) +or break the build if a native tool is not found on the path (on a local dev machine) + +.NOTES +#> +[CmdletBinding(PositionalBinding=$false)] +Param ( + [string] $BaseUri = 'https://netcorenativeassets.blob.core.windows.net/resource-packages/external', + [string] $InstallDirectory, + [switch] $Clean = $False, + [switch] $Force = $False, + [int] $DownloadRetries = 5, + [int] $RetryWaitTimeInSeconds = 30, + [string] $GlobalJsonFile, + [switch] $PathPromotion +) + +if (!$GlobalJsonFile) { + $GlobalJsonFile = Join-Path (Get-Item $PSScriptRoot).Parent.Parent.FullName 'global.json' +} + +Set-StrictMode -version 2.0 +$ErrorActionPreference='Stop' + +. 
$PSScriptRoot\pipeline-logging-functions.ps1 +Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1') + +try { + # Define verbose switch if undefined + $Verbose = $VerbosePreference -Eq 'Continue' + + $EngCommonBaseDir = Join-Path $PSScriptRoot 'native\' + $NativeBaseDir = $InstallDirectory + if (!$NativeBaseDir) { + $NativeBaseDir = CommonLibrary\Get-NativeInstallDirectory + } + $Env:CommonLibrary_NativeInstallDir = $NativeBaseDir + $InstallBin = Join-Path $NativeBaseDir 'bin' + $InstallerPath = Join-Path $EngCommonBaseDir 'install-tool.ps1' + + # Process tools list + Write-Host "Processing $GlobalJsonFile" + If (-Not (Test-Path $GlobalJsonFile)) { + Write-Host "Unable to find '$GlobalJsonFile'" + exit 0 + } + $NativeTools = Get-Content($GlobalJsonFile) -Raw | + ConvertFrom-Json | + Select-Object -Expand 'native-tools' -ErrorAction SilentlyContinue + if ($NativeTools) { + if ($PathPromotion -eq $True) { + $ArcadeToolsDirectory = "$env:SYSTEMDRIVE\arcade-tools" + if (Test-Path $ArcadeToolsDirectory) { # if this directory exists, we should use native tools on machine + $NativeTools.PSObject.Properties | ForEach-Object { + $ToolName = $_.Name + $ToolVersion = $_.Value + $InstalledTools = @{} + + if ((Get-Command "$ToolName" -ErrorAction SilentlyContinue) -eq $null) { + if ($ToolVersion -eq "latest") { + $ToolVersion = "" + } + $ToolDirectories = (Get-ChildItem -Path "$ArcadeToolsDirectory" -Filter "$ToolName-$ToolVersion*" | Sort-Object -Descending) + if ($ToolDirectories -eq $null) { + Write-Error "Unable to find directory for $ToolName $ToolVersion; please make sure the tool is installed on this image." + exit 1 + } + $ToolDirectory = $ToolDirectories[0] + $BinPathFile = "$($ToolDirectory.FullName)\binpath.txt" + if (-not (Test-Path -Path "$BinPathFile")) { + Write-Error "Unable to find binpath.txt in '$($ToolDirectory.FullName)' ($ToolName $ToolVersion); artifact is either installed incorrectly or is not a bootstrappable tool." + exit 1 + } + $BinPath = Get-Content "$BinPathFile" + $ToolPath = Convert-Path -Path $BinPath + Write-Host "Adding $ToolName to the path ($ToolPath)..." + Write-Host "##vso[task.prependpath]$ToolPath" + $env:PATH = "$ToolPath;$env:PATH" + $InstalledTools += @{ $ToolName = $ToolDirectory.FullName } + } + } + return $InstalledTools + } else { + $NativeTools.PSObject.Properties | ForEach-Object { + $ToolName = $_.Name + $ToolVersion = $_.Value + + if ((Get-Command "$ToolName" -ErrorAction SilentlyContinue) -eq $null) { + Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message "$ToolName not found on path. Please install $ToolName $ToolVersion before proceeding." + Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message "If this is running on a build machine, the arcade-tools directory was not found, which means there's an error with the image." 
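+              # Illustrative 'native-tools' section of global.json that this loop iterates over
+              # (hypothetical tool and version):
+              #   "native-tools": { "graphviz": "2.38" }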
+ } + } + exit 0 + } + } else { + $NativeTools.PSObject.Properties | ForEach-Object { + $ToolName = $_.Name + $ToolVersion = $_.Value + $LocalInstallerArguments = @{ ToolName = "$ToolName" } + $LocalInstallerArguments += @{ InstallPath = "$InstallBin" } + $LocalInstallerArguments += @{ BaseUri = "$BaseUri" } + $LocalInstallerArguments += @{ CommonLibraryDirectory = "$EngCommonBaseDir" } + $LocalInstallerArguments += @{ Version = "$ToolVersion" } + + if ($Verbose) { + $LocalInstallerArguments += @{ Verbose = $True } + } + if (Get-Variable 'Force' -ErrorAction 'SilentlyContinue') { + if($Force) { + $LocalInstallerArguments += @{ Force = $True } + } + } + if ($Clean) { + $LocalInstallerArguments += @{ Clean = $True } + } + + Write-Verbose "Installing $ToolName version $ToolVersion" + Write-Verbose "Executing '$InstallerPath $($LocalInstallerArguments.Keys.ForEach({"-$_ '$($LocalInstallerArguments.$_)'"}) -join ' ')'" + & $InstallerPath @LocalInstallerArguments + if ($LASTEXITCODE -Ne "0") { + $errMsg = "$ToolName installation failed" + if ((Get-Variable 'DoNotAbortNativeToolsInstallationOnFailure' -ErrorAction 'SilentlyContinue') -and $DoNotAbortNativeToolsInstallationOnFailure) { + $showNativeToolsWarning = $true + if ((Get-Variable 'DoNotDisplayNativeToolsInstallationWarnings' -ErrorAction 'SilentlyContinue') -and $DoNotDisplayNativeToolsInstallationWarnings) { + $showNativeToolsWarning = $false + } + if ($showNativeToolsWarning) { + Write-Warning $errMsg + } + $toolInstallationFailure = $true + } else { + # We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482 + Write-Host $errMsg + exit 1 + } + } + } + + if ((Get-Variable 'toolInstallationFailure' -ErrorAction 'SilentlyContinue') -and $toolInstallationFailure) { + # We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482 + Write-Host 'Native tools bootstrap failed' + exit 1 + } + } + } + else { + Write-Host 'No native tools defined in global.json' + exit 0 + } + + if ($Clean) { + exit 0 + } + if (Test-Path $InstallBin) { + Write-Host 'Native tools are available from ' (Convert-Path -Path $InstallBin) + Write-Host "##vso[task.prependpath]$(Convert-Path -Path $InstallBin)" + return $InstallBin + } + elseif (-not ($PathPromotion)) { + Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message 'Native tools install directory does not exist, installation failed' + exit 1 + } + exit 0 +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message $_ + ExitWithExitCode 1 +} diff --git a/eng/common/init-tools-native.sh b/eng/common/init-tools-native.sh new file mode 100644 index 0000000..3e6a8d6 --- /dev/null +++ b/eng/common/init-tools-native.sh @@ -0,0 +1,238 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +base_uri='https://netcorenativeassets.blob.core.windows.net/resource-packages/external' +install_directory='' +clean=false +force=false +download_retries=5 +retry_wait_time_seconds=30 +global_json_file="$(dirname "$(dirname "${scriptroot}")")/global.json" +declare -a native_assets + +. $scriptroot/pipeline-logging-functions.sh +. 
$scriptroot/native/common-library.sh + +while (($# > 0)); do + lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" + case $lowerI in + --baseuri) + base_uri=$2 + shift 2 + ;; + --installdirectory) + install_directory=$2 + shift 2 + ;; + --clean) + clean=true + shift 1 + ;; + --force) + force=true + shift 1 + ;; + --donotabortonfailure) + donotabortonfailure=true + shift 1 + ;; + --donotdisplaywarnings) + donotdisplaywarnings=true + shift 1 + ;; + --downloadretries) + download_retries=$2 + shift 2 + ;; + --retrywaittimeseconds) + retry_wait_time_seconds=$2 + shift 2 + ;; + --help) + echo "Common settings:" + echo " --installdirectory Directory to install native toolset." + echo " This is a command-line override for the default" + echo " Install directory precedence order:" + echo " - InstallDirectory command-line override" + echo " - NETCOREENG_INSTALL_DIRECTORY environment variable" + echo " - (default) %USERPROFILE%/.netcoreeng/native" + echo "" + echo " --clean Switch specifying not to install anything, but cleanup native asset folders" + echo " --donotabortonfailure Switch specifiying whether to abort native tools installation on failure" + echo " --donotdisplaywarnings Switch specifiying whether to display warnings during native tools installation on failure" + echo " --force Clean and then install tools" + echo " --help Print help and exit" + echo "" + echo "Advanced settings:" + echo " --baseuri Base URI for where to download native tools from" + echo " --downloadretries Number of times a download should be attempted" + echo " --retrywaittimeseconds Wait time between download attempts" + echo "" + exit 0 + ;; + esac +done + +function ReadGlobalJsonNativeTools { + # happy path: we have a proper JSON parsing tool `jq(1)` in PATH! + if command -v jq &> /dev/null; then + + # jq: read each key/value pair under "native-tools" entry and emit: + # KEY="" VALUE="" + # followed by a null byte. + # + # bash: read line with null byte delimeter and push to array (for later `eval`uation). + + while IFS= read -rd '' line; do + native_assets+=("$line") + done < <(jq -r '. | + select(has("native-tools")) | + ."native-tools" | + keys[] as $k | + @sh "KEY=\($k) VALUE=\(.[$k])\u0000"' "$global_json_file") + + return + fi + + # Warning: falling back to manually parsing JSON, which is not recommended. + + # Following routine matches the output and escaping logic of jq(1)'s @sh formatter used above. + # It has been tested with several weird strings with escaped characters in entries (key and value) + # and results were compared with the output of jq(1) in binary representation using xxd(1); + # just before the assignment to 'native_assets' array (above and below). + + # try to capture the section under "native-tools". + if [[ ! "$(cat "$global_json_file")" =~ \"native-tools\"[[:space:]\:\{]*([^\}]+) ]]; then + return + fi + + section="${BASH_REMATCH[1]}" + + parseStarted=0 + possibleEnd=0 + escaping=0 + escaped=0 + isKey=1 + + for (( i=0; i<${#section}; i++ )); do + char="${section:$i:1}" + if ! ((parseStarted)) && [[ "$char" =~ [[:space:],:] ]]; then continue; fi + + if ! ((escaping)) && [[ "$char" == "\\" ]]; then + escaping=1 + elif ((escaping)) && ! ((escaped)); then + escaped=1 + fi + + if ! ((parseStarted)) && [[ "$char" == "\"" ]]; then + parseStarted=1 + possibleEnd=0 + elif [[ "$char" == "'" ]]; then + token="$token'\\\''" + possibleEnd=0 + elif ((escaping)) || [[ "$char" != "\"" ]]; then + token="$token$char" + possibleEnd=1 + fi + + if ((possibleEnd)) && ! 
((escaping)) && [[ "$char" == "\"" ]]; then + # Use printf to unescape token to match jq(1)'s @sh formatting rules. + # do not use 'token="$(printf "$token")"' syntax, as $() eats the trailing linefeed. + printf -v token "'$token'" + + if ((isKey)); then + KEY="$token" + isKey=0 + else + line="KEY=$KEY VALUE=$token" + native_assets+=("$line") + isKey=1 + fi + + # reset for next token + parseStarted=0 + token= + elif ((escaping)) && ((escaped)); then + escaping=0 + escaped=0 + fi + done +} + +native_base_dir=$install_directory +if [[ -z $install_directory ]]; then + native_base_dir=$(GetNativeInstallDirectory) +fi + +install_bin="${native_base_dir}/bin" +installed_any=false + +ReadGlobalJsonNativeTools + +if [[ ${#native_assets[@]} -eq 0 ]]; then + echo "No native tools defined in global.json" + exit 0; +else + native_installer_dir="$scriptroot/native" + for index in "${!native_assets[@]}"; do + eval "${native_assets["$index"]}" + + installer_path="$native_installer_dir/install-$KEY.sh" + installer_command="$installer_path" + installer_command+=" --baseuri $base_uri" + installer_command+=" --installpath $install_bin" + installer_command+=" --version $VALUE" + echo $installer_command + + if [[ $force = true ]]; then + installer_command+=" --force" + fi + + if [[ $clean = true ]]; then + installer_command+=" --clean" + fi + + if [[ -a $installer_path ]]; then + $installer_command + if [[ $? != 0 ]]; then + if [[ $donotabortonfailure = true ]]; then + if [[ $donotdisplaywarnings != true ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed" + fi + else + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed" + exit 1 + fi + else + $installed_any = true + fi + else + if [[ $donotabortonfailure == true ]]; then + if [[ $donotdisplaywarnings != true ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed: no install script" + fi + else + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed: no install script" + exit 1 + fi + fi + done +fi + +if [[ $clean = true ]]; then + exit 0 +fi + +if [[ -d $install_bin ]]; then + echo "Native tools are available from $install_bin" + echo "##vso[task.prependpath]$install_bin" +else + if [[ $installed_any = true ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Native tools install directory does not exist, installation failed" + exit 1 + fi +fi + +exit 0 diff --git a/eng/common/internal-feed-operations.ps1 b/eng/common/internal-feed-operations.ps1 new file mode 100644 index 0000000..92b7734 --- /dev/null +++ b/eng/common/internal-feed-operations.ps1 @@ -0,0 +1,132 @@ +param( + [Parameter(Mandatory=$true)][string] $Operation, + [string] $AuthToken, + [string] $CommitSha, + [string] $RepoName, + [switch] $IsFeedPrivate +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 +. $PSScriptRoot\tools.ps1 + +# Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed +# in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in +# https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables. This should ONLY be called from identified +# internal builds +function SetupCredProvider { + param( + [string] $AuthToken + ) + + # Install the Cred Provider NuGet plugin + Write-Host 'Setting up Cred Provider NuGet plugin in the agent...' 
+ Write-Host "Getting 'installcredprovider.ps1' from 'https://github.com/microsoft/artifacts-credprovider'..." + + $url = 'https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.ps1' + + Write-Host "Writing the contents of 'installcredprovider.ps1' locally..." + Invoke-WebRequest $url -OutFile installcredprovider.ps1 + + Write-Host 'Installing plugin...' + .\installcredprovider.ps1 -Force + + Write-Host "Deleting local copy of 'installcredprovider.ps1'..." + Remove-Item .\installcredprovider.ps1 + + if (-Not("$env:USERPROFILE\.nuget\plugins\netcore")) { + Write-PipelineTelemetryError -Category 'Arcade' -Message 'CredProvider plugin was not installed correctly!' + ExitWithExitCode 1 + } + else { + Write-Host 'CredProvider plugin was installed correctly!' + } + + # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable + # feeds successfully + + $nugetConfigPath = Join-Path $RepoRoot "NuGet.config" + + if (-Not (Test-Path -Path $nugetConfigPath)) { + Write-PipelineTelemetryError -Category 'Build' -Message 'NuGet.config file not found in repo root!' + ExitWithExitCode 1 + } + + $endpoints = New-Object System.Collections.ArrayList + $nugetConfigPackageSources = Select-Xml -Path $nugetConfigPath -XPath "//packageSources/add[contains(@key, 'darc-int-')]/@value" | foreach{$_.Node.Value} + + if (($nugetConfigPackageSources | Measure-Object).Count -gt 0 ) { + foreach ($stableRestoreResource in $nugetConfigPackageSources) { + $trimmedResource = ([string]$stableRestoreResource).Trim() + [void]$endpoints.Add(@{endpoint="$trimmedResource"; password="$AuthToken"}) + } + } + + if (($endpoints | Measure-Object).Count -gt 0) { + $endpointCredentials = @{endpointCredentials=$endpoints} | ConvertTo-Json -Compress + + # Create the environment variables the AzDo way + Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $endpointCredentials -Properties @{ + 'variable' = 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' + 'issecret' = 'false' + } + + # We don't want sessions cached since we will be updating the endpoints quite frequently + Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data 'False' -Properties @{ + 'variable' = 'NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED' + 'issecret' = 'false' + } + } + else + { + Write-Host 'No internal endpoints found in NuGet.config' + } +} + +#Workaround for https://github.com/microsoft/msbuild/issues/4430 +function InstallDotNetSdkAndRestoreArcade { + $dotnetTempDir = Join-Path $RepoRoot "dotnet" + $dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*) + $dotnet = "$dotnetTempDir\dotnet.exe" + $restoreProjPath = "$PSScriptRoot\restore.proj" + + Write-Host "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..." + InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion" + + '' | Out-File "$restoreProjPath" + + & $dotnet restore $restoreProjPath + + Write-Host 'Arcade SDK restored!' + + if (Test-Path -Path $restoreProjPath) { + Remove-Item $restoreProjPath + } + + if (Test-Path -Path $dotnetTempDir) { + Remove-Item $dotnetTempDir -Recurse + } +} + +try { + Push-Location $PSScriptRoot + + if ($Operation -like 'setup') { + SetupCredProvider $AuthToken + } + elseif ($Operation -like 'install-restore') { + InstallDotNetSdkAndRestoreArcade + } + else { + Write-PipelineTelemetryError -Category 'Arcade' -Message "Unknown operation '$Operation'!" 
+ ExitWithExitCode 1 + } +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'Arcade' -Message $_ + ExitWithExitCode 1 +} +finally { + Pop-Location +} diff --git a/eng/common/internal-feed-operations.sh b/eng/common/internal-feed-operations.sh new file mode 100644 index 0000000..9378223 --- /dev/null +++ b/eng/common/internal-feed-operations.sh @@ -0,0 +1,141 @@ +#!/usr/bin/env bash + +set -e + +# Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed +# in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in +# https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables. +# This should ONLY be called from identified internal builds +function SetupCredProvider { + local authToken=$1 + + # Install the Cred Provider NuGet plugin + echo "Setting up Cred Provider NuGet plugin in the agent..."... + echo "Getting 'installcredprovider.ps1' from 'https://github.com/microsoft/artifacts-credprovider'..." + + local url="https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.sh" + + echo "Writing the contents of 'installcredprovider.ps1' locally..." + local installcredproviderPath="installcredprovider.sh" + if command -v curl > /dev/null; then + curl $url > "$installcredproviderPath" + else + wget -q -O "$installcredproviderPath" "$url" + fi + + echo "Installing plugin..." + . "$installcredproviderPath" + + echo "Deleting local copy of 'installcredprovider.sh'..." + rm installcredprovider.sh + + if [ ! -d "$HOME/.nuget/plugins" ]; then + Write-PipelineTelemetryError -category 'Build' 'CredProvider plugin was not installed correctly!' + ExitWithExitCode 1 + else + echo "CredProvider plugin was installed correctly!" + fi + + # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable + # feeds successfully + + local nugetConfigPath="{$repo_root}NuGet.config" + + if [ ! "$nugetConfigPath" ]; then + Write-PipelineTelemetryError -category 'Build' "NuGet.config file not found in repo's root!" + ExitWithExitCode 1 + fi + + local endpoints='[' + local nugetConfigPackageValues=`cat "$nugetConfigPath" | grep "key=\"darc-int-"` + local pattern="value=\"(.*)\"" + + for value in $nugetConfigPackageValues + do + if [[ $value =~ $pattern ]]; then + local endpoint="${BASH_REMATCH[1]}" + endpoints+="{\"endpoint\": \"$endpoint\", \"password\": \"$authToken\"}," + fi + done + + endpoints=${endpoints%?} + endpoints+=']' + + if [ ${#endpoints} -gt 2 ]; then + local endpointCredentials="{\"endpointCredentials\": "$endpoints"}" + + echo "##vso[task.setvariable variable=VSS_NUGET_EXTERNAL_FEED_ENDPOINTS]$endpointCredentials" + echo "##vso[task.setvariable variable=NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED]False" + else + echo "No internal endpoints found in NuGet.config" + fi +} + +# Workaround for https://github.com/microsoft/msbuild/issues/4430 +function InstallDotNetSdkAndRestoreArcade { + local dotnetTempDir="$repo_root/dotnet" + local dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*) + local restoreProjPath="$repo_root/eng/common/restore.proj" + + echo "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..." 
+ echo "" > "$restoreProjPath" + + InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion" + + local res=`$dotnetTempDir/dotnet restore $restoreProjPath` + echo "Arcade SDK restored!" + + # Cleanup + if [ "$restoreProjPath" ]; then + rm "$restoreProjPath" + fi + + if [ "$dotnetTempDir" ]; then + rm -r $dotnetTempDir + fi +} + +source="${BASH_SOURCE[0]}" +operation='' +authToken='' +repoName='' + +while [[ $# > 0 ]]; do + opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" + case "$opt" in + --operation) + operation=$2 + shift + ;; + --authtoken) + authToken=$2 + shift + ;; + *) + echo "Invalid argument: $1" + usage + exit 1 + ;; + esac + + shift +done + +while [[ -h "$source" ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +. "$scriptroot/tools.sh" + +if [ "$operation" = "setup" ]; then + SetupCredProvider $authToken +elif [ "$operation" = "install-restore" ]; then + InstallDotNetSdkAndRestoreArcade +else + echo "Unknown operation '$operation'!" +fi diff --git a/eng/common/internal/Directory.Build.props b/eng/common/internal/Directory.Build.props new file mode 100644 index 0000000..dbf99d8 --- /dev/null +++ b/eng/common/internal/Directory.Build.props @@ -0,0 +1,4 @@ + + + + diff --git a/eng/common/internal/NuGet.config b/eng/common/internal/NuGet.config new file mode 100644 index 0000000..19d3d31 --- /dev/null +++ b/eng/common/internal/NuGet.config @@ -0,0 +1,7 @@ + + + + + + + diff --git a/eng/common/internal/Tools.csproj b/eng/common/internal/Tools.csproj new file mode 100644 index 0000000..7f5ce6d --- /dev/null +++ b/eng/common/internal/Tools.csproj @@ -0,0 +1,30 @@ + + + + net472 + false + false + + + + + + + + + + + + + + https://devdiv.pkgs.visualstudio.com/_packaging/dotnet-core-internal-tooling/nuget/v3/index.json; + + + $(RestoreSources); + https://devdiv.pkgs.visualstudio.com/_packaging/VS/nuget/v3/index.json; + + + + + + diff --git a/eng/common/loc/P22DotNetHtmlLocalization.lss b/eng/common/loc/P22DotNetHtmlLocalization.lss new file mode 100644 index 0000000..5d892d6 --- /dev/null +++ b/eng/common/loc/P22DotNetHtmlLocalization.lss @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/eng/common/msbuild.ps1 b/eng/common/msbuild.ps1 new file mode 100644 index 0000000..f041e5d --- /dev/null +++ b/eng/common/msbuild.ps1 @@ -0,0 +1,28 @@ +[CmdletBinding(PositionalBinding=$false)] +Param( + [string] $verbosity = 'minimal', + [bool] $warnAsError = $true, + [bool] $nodeReuse = $true, + [switch] $ci, + [switch] $prepareMachine, + [switch] $excludePrereleaseVS, + [string] $msbuildEngine = $null, + [Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs +) + +. 
$PSScriptRoot\tools.ps1 + +try { + if ($ci) { + $nodeReuse = $false + } + + MSBuild @extraArgs +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'Build' -Message $_ + ExitWithExitCode 1 +} + +ExitWithExitCode 0 \ No newline at end of file diff --git a/eng/common/msbuild.sh b/eng/common/msbuild.sh new file mode 100644 index 0000000..20d3dad --- /dev/null +++ b/eng/common/msbuild.sh @@ -0,0 +1,58 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" + +# resolve $source until the file is no longer a symlink +while [[ -h "$source" ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +verbosity='minimal' +warn_as_error=true +node_reuse=true +prepare_machine=false +extra_args='' + +while (($# > 0)); do + lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" + case $lowerI in + --verbosity) + verbosity=$2 + shift 2 + ;; + --warnaserror) + warn_as_error=$2 + shift 2 + ;; + --nodereuse) + node_reuse=$2 + shift 2 + ;; + --ci) + ci=true + shift 1 + ;; + --preparemachine) + prepare_machine=true + shift 1 + ;; + *) + extra_args="$extra_args $1" + shift 1 + ;; + esac +done + +. "$scriptroot/tools.sh" + +if [[ "$ci" == true ]]; then + node_reuse=false +fi + +MSBuild $extra_args +ExitWithExitCode 0 diff --git a/eng/common/native/CommonLibrary.psm1 b/eng/common/native/CommonLibrary.psm1 new file mode 100644 index 0000000..ca38268 --- /dev/null +++ b/eng/common/native/CommonLibrary.psm1 @@ -0,0 +1,400 @@ +<# +.SYNOPSIS +Helper module to install an archive to a directory + +.DESCRIPTION +Helper module to download and extract an archive to a specified directory + +.PARAMETER Uri +Uri of artifact to download + +.PARAMETER InstallDirectory +Directory to extract artifact contents to + +.PARAMETER Force +Force download / extraction if file or contents already exist. Default = False + +.PARAMETER DownloadRetries +Total number of retry attempts. Default = 5 + +.PARAMETER RetryWaitTimeInSeconds +Wait time between retry attempts in seconds. 
Default = 30 + +.NOTES +Returns False if download or extraction fail, True otherwise +#> +function DownloadAndExtract { + [CmdletBinding(PositionalBinding=$false)] + Param ( + [Parameter(Mandatory=$True)] + [string] $Uri, + [Parameter(Mandatory=$True)] + [string] $InstallDirectory, + [switch] $Force = $False, + [int] $DownloadRetries = 5, + [int] $RetryWaitTimeInSeconds = 30 + ) + # Define verbose switch if undefined + $Verbose = $VerbosePreference -Eq "Continue" + + $TempToolPath = CommonLibrary\Get-TempPathFilename -Path $Uri + + # Download native tool + $DownloadStatus = CommonLibrary\Get-File -Uri $Uri ` + -Path $TempToolPath ` + -DownloadRetries $DownloadRetries ` + -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds ` + -Force:$Force ` + -Verbose:$Verbose + + if ($DownloadStatus -Eq $False) { + Write-Error "Download failed from $Uri" + return $False + } + + # Extract native tool + $UnzipStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath ` + -OutputDirectory $InstallDirectory ` + -Force:$Force ` + -Verbose:$Verbose + + if ($UnzipStatus -Eq $False) { + # Retry Download one more time with Force=true + $DownloadRetryStatus = CommonLibrary\Get-File -Uri $Uri ` + -Path $TempToolPath ` + -DownloadRetries 1 ` + -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds ` + -Force:$True ` + -Verbose:$Verbose + + if ($DownloadRetryStatus -Eq $False) { + Write-Error "Last attempt of download failed as well" + return $False + } + + # Retry unzip again one more time with Force=true + $UnzipRetryStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath ` + -OutputDirectory $InstallDirectory ` + -Force:$True ` + -Verbose:$Verbose + if ($UnzipRetryStatus -Eq $False) + { + Write-Error "Last attempt of unzip failed as well" + # Clean up partial zips and extracts + if (Test-Path $TempToolPath) { + Remove-Item $TempToolPath -Force + } + if (Test-Path $InstallDirectory) { + Remove-Item $InstallDirectory -Force -Recurse + } + return $False + } + } + + return $True +} + +<# +.SYNOPSIS +Download a file, retry on failure + +.DESCRIPTION +Download specified file and retry if attempt fails + +.PARAMETER Uri +Uri of file to download. If Uri is a local path, the file will be copied instead of downloaded + +.PARAMETER Path +Path to download or copy uri file to + +.PARAMETER Force +Overwrite existing file if present. Default = False + +.PARAMETER DownloadRetries +Total number of retry attempts. Default = 5 + +.PARAMETER RetryWaitTimeInSeconds +Wait time between retry attempts in seconds Default = 30 + +#> +function Get-File { + [CmdletBinding(PositionalBinding=$false)] + Param ( + [Parameter(Mandatory=$True)] + [string] $Uri, + [Parameter(Mandatory=$True)] + [string] $Path, + [int] $DownloadRetries = 5, + [int] $RetryWaitTimeInSeconds = 30, + [switch] $Force = $False + ) + $Attempt = 0 + + if ($Force) { + if (Test-Path $Path) { + Remove-Item $Path -Force + } + } + if (Test-Path $Path) { + Write-Host "File '$Path' already exists, skipping download" + return $True + } + + $DownloadDirectory = Split-Path -ErrorAction Ignore -Path "$Path" -Parent + if (-Not (Test-Path $DownloadDirectory)) { + New-Item -path $DownloadDirectory -force -itemType "Directory" | Out-Null + } + + $TempPath = "$Path.tmp" + if (Test-Path -IsValid -Path $Uri) { + Write-Verbose "'$Uri' is a file path, copying temporarily to '$TempPath'" + Copy-Item -Path $Uri -Destination $TempPath + Write-Verbose "Moving temporary file to '$Path'" + Move-Item -Path $TempPath -Destination $Path + return $? 
+ } + else { + Write-Verbose "Downloading $Uri" + # Don't display the console progress UI - it's a huge perf hit + $ProgressPreference = 'SilentlyContinue' + while($Attempt -Lt $DownloadRetries) + { + try { + Invoke-WebRequest -UseBasicParsing -Uri $Uri -OutFile $TempPath + Write-Verbose "Downloaded to temporary location '$TempPath'" + Move-Item -Path $TempPath -Destination $Path + Write-Verbose "Moved temporary file to '$Path'" + return $True + } + catch { + $Attempt++ + if ($Attempt -Lt $DownloadRetries) { + $AttemptsLeft = $DownloadRetries - $Attempt + Write-Warning "Download failed, $AttemptsLeft attempts remaining, will retry in $RetryWaitTimeInSeconds seconds" + Start-Sleep -Seconds $RetryWaitTimeInSeconds + } + else { + Write-Error $_ + Write-Error $_.Exception + } + } + } + } + + return $False +} + +<# +.SYNOPSIS +Generate a shim for a native tool + +.DESCRIPTION +Creates a wrapper script (shim) that passes arguments forward to native tool assembly + +.PARAMETER ShimName +The name of the shim + +.PARAMETER ShimDirectory +The directory where shims are stored + +.PARAMETER ToolFilePath +Path to file that shim forwards to + +.PARAMETER Force +Replace shim if already present. Default = False + +.NOTES +Returns $True if generating shim succeeds, $False otherwise +#> +function New-ScriptShim { + [CmdletBinding(PositionalBinding=$false)] + Param ( + [Parameter(Mandatory=$True)] + [string] $ShimName, + [Parameter(Mandatory=$True)] + [string] $ShimDirectory, + [Parameter(Mandatory=$True)] + [string] $ToolFilePath, + [Parameter(Mandatory=$True)] + [string] $BaseUri, + [switch] $Force + ) + try { + Write-Verbose "Generating '$ShimName' shim" + + if (-Not (Test-Path $ToolFilePath)){ + Write-Error "Specified tool file path '$ToolFilePath' does not exist" + return $False + } + + # WinShimmer is a small .NET Framework program that creates .exe shims to bootstrapped programs + # Many of the checks for installed programs expect a .exe extension for Windows tools, rather + # than a .bat or .cmd file. + # Source: https://github.com/dotnet/arcade/tree/master/src/WinShimmer + if (-Not (Test-Path "$ShimDirectory\WinShimmer\winshimmer.exe")) { + $InstallStatus = DownloadAndExtract -Uri "$BaseUri/windows/winshimmer/WinShimmer.zip" ` + -InstallDirectory $ShimDirectory\WinShimmer ` + -Force:$Force ` + -DownloadRetries 2 ` + -RetryWaitTimeInSeconds 5 ` + -Verbose:$Verbose + } + + if ((Test-Path (Join-Path $ShimDirectory "$ShimName.exe"))) { + Write-Host "$ShimName.exe already exists; replacing..." 
+ Remove-Item (Join-Path $ShimDirectory "$ShimName.exe") + } + + & "$ShimDirectory\WinShimmer\winshimmer.exe" $ShimName $ToolFilePath $ShimDirectory + return $True + } + catch { + Write-Host $_ + Write-Host $_.Exception + return $False + } +} + +<# +.SYNOPSIS +Returns the machine architecture of the host machine + +.NOTES +Returns 'x64' on 64 bit machines + Returns 'x86' on 32 bit machines +#> +function Get-MachineArchitecture { + $ProcessorArchitecture = $Env:PROCESSOR_ARCHITECTURE + $ProcessorArchitectureW6432 = $Env:PROCESSOR_ARCHITEW6432 + if($ProcessorArchitecture -Eq "X86") + { + if(($ProcessorArchitectureW6432 -Eq "") -Or + ($ProcessorArchitectureW6432 -Eq "X86")) { + return "x86" + } + $ProcessorArchitecture = $ProcessorArchitectureW6432 + } + if (($ProcessorArchitecture -Eq "AMD64") -Or + ($ProcessorArchitecture -Eq "IA64") -Or + ($ProcessorArchitecture -Eq "ARM64") -Or + ($ProcessorArchitecture -Eq "LOONGARCH64")) { + return "x64" + } + return "x86" +} + +<# +.SYNOPSIS +Get the name of a temporary folder under the native install directory +#> +function Get-TempDirectory { + return Join-Path (Get-NativeInstallDirectory) "temp/" +} + +function Get-TempPathFilename { + [CmdletBinding(PositionalBinding=$false)] + Param ( + [Parameter(Mandatory=$True)] + [string] $Path + ) + $TempDir = CommonLibrary\Get-TempDirectory + $TempFilename = Split-Path $Path -leaf + $TempPath = Join-Path $TempDir $TempFilename + return $TempPath +} + +<# +.SYNOPSIS +Returns the base directory to use for native tool installation + +.NOTES +Returns the value of the NETCOREENG_INSTALL_DIRECTORY if that environment variable +is set, or otherwise returns an install directory under the %USERPROFILE% +#> +function Get-NativeInstallDirectory { + $InstallDir = $Env:NETCOREENG_INSTALL_DIRECTORY + if (!$InstallDir) { + $InstallDir = Join-Path $Env:USERPROFILE ".netcoreeng/native/" + } + return $InstallDir +} + +<# +.SYNOPSIS +Unzip an archive + +.DESCRIPTION +Powershell module to unzip an archive to a specified directory + +.PARAMETER ZipPath (Required) +Path to archive to unzip + +.PARAMETER OutputDirectory (Required) +Output directory for archive contents + +.PARAMETER Force +Overwrite output directory contents if they already exist + +.NOTES +- Returns True and does not perform an extraction if output directory already exists but Overwrite is not True. +- Returns True if unzip operation is successful +- Returns False if Overwrite is True and it is unable to remove contents of OutputDirectory +- Returns False if unable to extract zip archive +#> +function Expand-Zip { + [CmdletBinding(PositionalBinding=$false)] + Param ( + [Parameter(Mandatory=$True)] + [string] $ZipPath, + [Parameter(Mandatory=$True)] + [string] $OutputDirectory, + [switch] $Force + ) + + Write-Verbose "Extracting '$ZipPath' to '$OutputDirectory'" + try { + if ((Test-Path $OutputDirectory) -And (-Not $Force)) { + Write-Host "Directory '$OutputDirectory' already exists, skipping extract" + return $True + } + if (Test-Path $OutputDirectory) { + Write-Verbose "'Force' is 'True', but '$OutputDirectory' exists, removing directory" + Remove-Item $OutputDirectory -Force -Recurse + if ($? 
-Eq $False) { + Write-Error "Unable to remove '$OutputDirectory'" + return $False + } + } + + $TempOutputDirectory = Join-Path "$(Split-Path -Parent $OutputDirectory)" "$(Split-Path -Leaf $OutputDirectory).tmp" + if (Test-Path $TempOutputDirectory) { + Remove-Item $TempOutputDirectory -Force -Recurse + } + New-Item -Path $TempOutputDirectory -Force -ItemType "Directory" | Out-Null + + Add-Type -assembly "system.io.compression.filesystem" + [io.compression.zipfile]::ExtractToDirectory("$ZipPath", "$TempOutputDirectory") + if ($? -Eq $False) { + Write-Error "Unable to extract '$ZipPath'" + return $False + } + + Move-Item -Path $TempOutputDirectory -Destination $OutputDirectory + } + catch { + Write-Host $_ + Write-Host $_.Exception + + return $False + } + return $True +} + +export-modulemember -function DownloadAndExtract +export-modulemember -function Expand-Zip +export-modulemember -function Get-File +export-modulemember -function Get-MachineArchitecture +export-modulemember -function Get-NativeInstallDirectory +export-modulemember -function Get-TempDirectory +export-modulemember -function Get-TempPathFilename +export-modulemember -function New-ScriptShim diff --git a/eng/common/native/common-library.sh b/eng/common/native/common-library.sh new file mode 100644 index 0000000..080c2c2 --- /dev/null +++ b/eng/common/native/common-library.sh @@ -0,0 +1,172 @@ +#!/usr/bin/env bash + +function GetNativeInstallDirectory { + local install_dir + + if [[ -z $NETCOREENG_INSTALL_DIRECTORY ]]; then + install_dir=$HOME/.netcoreeng/native/ + else + install_dir=$NETCOREENG_INSTALL_DIRECTORY + fi + + echo $install_dir + return 0 +} + +function GetTempDirectory { + + echo $(GetNativeInstallDirectory)temp/ + return 0 +} + +function ExpandZip { + local zip_path=$1 + local output_directory=$2 + local force=${3:-false} + + echo "Extracting $zip_path to $output_directory" + if [[ -d $output_directory ]] && [[ $force = false ]]; then + echo "Directory '$output_directory' already exists, skipping extract" + return 0 + fi + + if [[ -d $output_directory ]]; then + echo "'Force flag enabled, but '$output_directory' exists. Removing directory" + rm -rf $output_directory + if [[ $? != 0 ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to remove '$output_directory'" + return 1 + fi + fi + + echo "Creating directory: '$output_directory'" + mkdir -p $output_directory + + echo "Extracting archive" + tar -xf $zip_path -C $output_directory + if [[ $? != 0 ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to extract '$zip_path'" + return 1 + fi + + return 0 +} + +function GetCurrentOS { + local unameOut="$(uname -s)" + case $unameOut in + Linux*) echo "Linux";; + Darwin*) echo "MacOS";; + esac + return 0 +} + +function GetFile { + local uri=$1 + local path=$2 + local force=${3:-false} + local download_retries=${4:-5} + local retry_wait_time_seconds=${5:-30} + + if [[ -f $path ]]; then + if [[ $force = false ]]; then + echo "File '$path' already exists. Skipping download" + return 0 + else + rm -rf $path + fi + fi + + if [[ -f $uri ]]; then + echo "'$uri' is a file path, copying file to '$path'" + cp $uri $path + return $? + fi + + echo "Downloading $uri" + # Use curl if available, otherwise use wget + if command -v curl > /dev/null; then + curl "$uri" -sSL --retry $download_retries --retry-delay $retry_wait_time_seconds --create-dirs -o "$path" --fail + else + wget -q -O "$path" "$uri" --tries="$download_retries" + fi + + return $? 
+} + +function GetTempPathFileName { + local path=$1 + + local temp_dir=$(GetTempDirectory) + local temp_file_name=$(basename $path) + echo $temp_dir$temp_file_name + return 0 +} + +function DownloadAndExtract { + local uri=$1 + local installDir=$2 + local force=${3:-false} + local download_retries=${4:-5} + local retry_wait_time_seconds=${5:-30} + + local temp_tool_path=$(GetTempPathFileName $uri) + + echo "downloading to: $temp_tool_path" + + # Download file + GetFile "$uri" "$temp_tool_path" $force $download_retries $retry_wait_time_seconds + if [[ $? != 0 ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to download '$uri' to '$temp_tool_path'." + return 1 + fi + + # Extract File + echo "extracting from $temp_tool_path to $installDir" + ExpandZip "$temp_tool_path" "$installDir" $force $download_retries $retry_wait_time_seconds + if [[ $? != 0 ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to extract '$temp_tool_path' to '$installDir'." + return 1 + fi + + return 0 +} + +function NewScriptShim { + local shimpath=$1 + local tool_file_path=$2 + local force=${3:-false} + + echo "Generating '$shimpath' shim" + if [[ -f $shimpath ]]; then + if [[ $force = false ]]; then + echo "File '$shimpath' already exists." >&2 + return 1 + else + rm -rf $shimpath + fi + fi + + if [[ ! -f $tool_file_path ]]; then + # try to see if the path is lower cased + tool_file_path="$(echo $tool_file_path | tr "[:upper:]" "[:lower:]")" + if [[ ! -f $tool_file_path ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Specified tool file path:'$tool_file_path' does not exist" + return 1 + fi + fi + + local shim_contents=$'#!/usr/bin/env bash\n' + shim_contents+="SHIMARGS="$'$1\n' + shim_contents+="$tool_file_path"$' $SHIMARGS\n' + + # Write shim file + echo "$shim_contents" > $shimpath + + chmod +x $shimpath + + echo "Finished generating shim '$shimpath'" + + return $? +} + diff --git a/eng/common/native/find-native-compiler.sh b/eng/common/native/find-native-compiler.sh new file mode 100644 index 0000000..aed19d0 --- /dev/null +++ b/eng/common/native/find-native-compiler.sh @@ -0,0 +1,121 @@ +#!/usr/bin/env bash +# +# This file locates the native compiler with the given name and version and sets the environment variables to locate it. +# + +source="${BASH_SOURCE[0]}" + +# resolve $SOURCE until the file is no longer a symlink +while [[ -h $source ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +if [ $# -lt 0 ] +then + echo "Usage..." + echo "find-native-compiler.sh " + echo "Specify the name of compiler (clang or gcc)." + echo "Specify the major version of compiler." + echo "Specify the minor version of compiler." + exit 1 +fi + +. $scriptroot/../pipeline-logging-functions.sh + +compiler="$1" +cxxCompiler="$compiler++" +majorVersion="$2" +minorVersion="$3" + +if [ "$compiler" = "gcc" ]; then cxxCompiler="g++"; fi + +check_version_exists() { + desired_version=-1 + + # Set up the environment to be used for building with the desired compiler. 
+ if command -v "$compiler-$1.$2" > /dev/null; then + desired_version="-$1.$2" + elif command -v "$compiler$1$2" > /dev/null; then + desired_version="$1$2" + elif command -v "$compiler-$1$2" > /dev/null; then + desired_version="-$1$2" + fi + + echo "$desired_version" +} + +if [ -z "$CLR_CC" ]; then + + # Set default versions + if [ -z "$majorVersion" ]; then + # note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero. + if [ "$compiler" = "clang" ]; then versions=( 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5 ) + elif [ "$compiler" = "gcc" ]; then versions=( 9 8 7 6 5 4.9 ); fi + + for version in "${versions[@]}"; do + parts=(${version//./ }) + desired_version="$(check_version_exists "${parts[0]}" "${parts[1]}")" + if [ "$desired_version" != "-1" ]; then majorVersion="${parts[0]}"; break; fi + done + + if [ -z "$majorVersion" ]; then + if command -v "$compiler" > /dev/null; then + if [ "$(uname)" != "Darwin" ]; then + Write-PipelineTelemetryError -category "Build" -type "warning" "Specific version of $compiler not found, falling back to use the one in PATH." + fi + export CC="$(command -v "$compiler")" + export CXX="$(command -v "$cxxCompiler")" + else + Write-PipelineTelemetryError -category "Build" "No usable version of $compiler found." + exit 1 + fi + else + if [ "$compiler" = "clang" ] && [ "$majorVersion" -lt 5 ]; then + if [ "$build_arch" = "arm" ] || [ "$build_arch" = "armel" ]; then + if command -v "$compiler" > /dev/null; then + Write-PipelineTelemetryError -category "Build" -type "warning" "Found clang version $majorVersion which is not supported on arm/armel architectures, falling back to use clang from PATH." + export CC="$(command -v "$compiler")" + export CXX="$(command -v "$cxxCompiler")" + else + Write-PipelineTelemetryError -category "Build" "Found clang version $majorVersion which is not supported on arm/armel architectures, and there is no clang in PATH." + exit 1 + fi + fi + fi + fi + else + desired_version="$(check_version_exists "$majorVersion" "$minorVersion")" + if [ "$desired_version" = "-1" ]; then + Write-PipelineTelemetryError -category "Build" "Could not find specific version of $compiler: $majorVersion $minorVersion." + exit 1 + fi + fi + + if [ -z "$CC" ]; then + export CC="$(command -v "$compiler$desired_version")" + export CXX="$(command -v "$cxxCompiler$desired_version")" + if [ -z "$CXX" ]; then export CXX="$(command -v "$cxxCompiler")"; fi + fi +else + if [ ! -f "$CLR_CC" ]; then + Write-PipelineTelemetryError -category "Build" "CLR_CC is set but path '$CLR_CC' does not exist" + exit 1 + fi + export CC="$CLR_CC" + export CXX="$CLR_CXX" +fi + +if [ -z "$CC" ]; then + Write-PipelineTelemetryError -category "Build" "Unable to find $compiler." + exit 1 +fi + +export CCC_CC="$CC" +export CCC_CXX="$CXX" +export SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version")" diff --git a/eng/common/native/init-compiler.sh b/eng/common/native/init-compiler.sh new file mode 100644 index 0000000..f5c1ec7 --- /dev/null +++ b/eng/common/native/init-compiler.sh @@ -0,0 +1,137 @@ +#!/bin/sh +# +# This file detects the C/C++ compiler and exports it to the CC/CXX environment variables +# +# NOTE: some scripts source this file and rely on stdout being empty, make sure to not output anything here! + +if [ -z "$build_arch" ] || [ -z "$compiler" ]; then + echo "Usage..." + echo "build_arch= compiler= init-compiler.sh" + echo "Specify the target architecture." + echo "Specify the name of compiler (clang or gcc)." 
+ exit 1 +fi + +case "$compiler" in + clang*|-clang*|--clang*) + # clangx.y or clang-x.y + version="$(echo "$compiler" | tr -d '[:alpha:]-=')" + majorVersion="${version%%.*}" + [ -z "${version##*.*}" ] && minorVersion="${version#*.}" + + if [ -z "$minorVersion" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -le 6 ]; then + minorVersion=0; + fi + compiler=clang + ;; + + gcc*|-gcc*|--gcc*) + # gccx.y or gcc-x.y + version="$(echo "$compiler" | tr -d '[:alpha:]-=')" + majorVersion="${version%%.*}" + [ -z "${version##*.*}" ] && minorVersion="${version#*.}" + compiler=gcc + ;; +esac + +cxxCompiler="$compiler++" + +# clear the existing CC and CXX from environment +CC= +CXX= +LDFLAGS= + +if [ "$compiler" = "gcc" ]; then cxxCompiler="g++"; fi + +check_version_exists() { + desired_version=-1 + + # Set up the environment to be used for building with the desired compiler. + if command -v "$compiler-$1.$2" > /dev/null; then + desired_version="-$1.$2" + elif command -v "$compiler$1$2" > /dev/null; then + desired_version="$1$2" + elif command -v "$compiler-$1$2" > /dev/null; then + desired_version="-$1$2" + fi + + echo "$desired_version" +} + +if [ -z "$CLR_CC" ]; then + + # Set default versions + if [ -z "$majorVersion" ]; then + # note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero. + if [ "$compiler" = "clang" ]; then versions="17 16 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5" + elif [ "$compiler" = "gcc" ]; then versions="13 12 11 10 9 8 7 6 5 4.9"; fi + + for version in $versions; do + _major="${version%%.*}" + [ -z "${version##*.*}" ] && _minor="${version#*.}" + desired_version="$(check_version_exists "$_major" "$_minor")" + if [ "$desired_version" != "-1" ]; then majorVersion="$_major"; break; fi + done + + if [ -z "$majorVersion" ]; then + if command -v "$compiler" > /dev/null; then + if [ "$(uname)" != "Darwin" ]; then + echo "Warning: Specific version of $compiler not found, falling back to use the one in PATH." + fi + CC="$(command -v "$compiler")" + CXX="$(command -v "$cxxCompiler")" + else + echo "No usable version of $compiler found." + exit 1 + fi + else + if [ "$compiler" = "clang" ] && [ "$majorVersion" -lt 5 ]; then + if [ "$build_arch" = "arm" ] || [ "$build_arch" = "armel" ]; then + if command -v "$compiler" > /dev/null; then + echo "Warning: Found clang version $majorVersion which is not supported on arm/armel architectures, falling back to use clang from PATH." + CC="$(command -v "$compiler")" + CXX="$(command -v "$cxxCompiler")" + else + echo "Found clang version $majorVersion which is not supported on arm/armel architectures, and there is no clang in PATH." + exit 1 + fi + fi + fi + fi + else + desired_version="$(check_version_exists "$majorVersion" "$minorVersion")" + if [ "$desired_version" = "-1" ]; then + echo "Could not find specific version of $compiler: $majorVersion $minorVersion." + exit 1 + fi + fi + + if [ -z "$CC" ]; then + CC="$(command -v "$compiler$desired_version")" + CXX="$(command -v "$cxxCompiler$desired_version")" + if [ -z "$CXX" ]; then CXX="$(command -v "$cxxCompiler")"; fi + fi +else + if [ ! -f "$CLR_CC" ]; then + echo "CLR_CC is set but path '$CLR_CC' does not exist" + exit 1 + fi + CC="$CLR_CC" + CXX="$CLR_CXX" +fi + +if [ -z "$CC" ]; then + echo "Unable to find $compiler." + exit 1 +fi + +# Only lld version >= 9 can be considered stable. lld doesn't support s390x. 
+if [ "$compiler" = "clang" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -ge 9 ] && [ "$build_arch" != "s390x" ]; then + if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then + LDFLAGS="-fuse-ld=lld" + fi +fi + +SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version")" + +export CC CXX LDFLAGS SCAN_BUILD_COMMAND diff --git a/eng/common/native/init-distro-rid.sh b/eng/common/native/init-distro-rid.sh new file mode 100644 index 0000000..de1687b --- /dev/null +++ b/eng/common/native/init-distro-rid.sh @@ -0,0 +1,130 @@ +#!/usr/bin/env bash + +# getNonPortableDistroRid +# +# Input: +# targetOs: (str) +# targetArch: (str) +# rootfsDir: (str) +# +# Return: +# non-portable rid +getNonPortableDistroRid() +{ + local targetOs="$1" + local targetArch="$2" + local rootfsDir="$3" + local nonPortableRid="" + + if [ "$targetOs" = "linux" ]; then + if [ -e "${rootfsDir}/etc/os-release" ]; then + source "${rootfsDir}/etc/os-release" + + if [[ "${ID}" == "rhel" || "${ID}" == "rocky" || "${ID}" == "alpine" ]]; then + # remove the last version digit + VERSION_ID="${VERSION_ID%.*}" + fi + + if [[ "${VERSION_ID:-}" =~ ^([[:digit:]]|\.)+$ ]]; then + nonPortableRid="${ID}.${VERSION_ID}-${targetArch}" + else + # Rolling release distros either do not set VERSION_ID, set it as blank or + # set it to non-version looking string (such as TEMPLATE_VERSION_ID on ArchLinux); + # so omit it here to be consistent with everything else. + nonPortableRid="${ID}-${targetArch}" + fi + + elif [ -e "${rootfsDir}/android_platform" ]; then + source "$rootfsDir"/android_platform + nonPortableRid="$RID" + fi + fi + + if [ "$targetOs" = "freebsd" ]; then + # $rootfsDir can be empty. freebsd-version is shell script and it should always work. + __freebsd_major_version=$($rootfsDir/bin/freebsd-version | { read v; echo "${v%%.*}"; }) + nonPortableRid="freebsd.$__freebsd_major_version-${targetArch}" + elif command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then + __android_sdk_version=$(getprop ro.build.version.sdk) + nonPortableRid="android.$__android_sdk_version-${targetArch}" + elif [ "$targetOs" = "illumos" ]; then + __uname_version=$(uname -v) + case "$__uname_version" in + omnios-*) + __omnios_major_version=$(echo "${__uname_version:8:2}") + nonPortableRid=omnios."$__omnios_major_version"-"$targetArch" + ;; + joyent_*) + __smartos_major_version=$(echo "${__uname_version:7:4}") + nonPortableRid=smartos."$__smartos_major_version"-"$targetArch" + ;; + illumos_*) + nonPortableRid=openindiana-"$targetArch" + ;; + esac + elif [ "$targetOs" = "solaris" ]; then + __uname_version=$(uname -v) + __solaris_major_version=$(echo "${__uname_version%.*}") + nonPortableRid=solaris."$__solaris_major_version"-"$targetArch" + elif [ "$targetOs" = "haiku" ]; then + __uname_release=$(uname -r) + nonPortableRid=haiku.r"$__uname_release"-"$targetArch" + fi + + echo "$(echo $nonPortableRid | tr '[:upper:]' '[:lower:]')" +} + +# initDistroRidGlobal +# +# Input: +# os: (str) +# arch: (str) +# rootfsDir?: (nullable:string) +# +# Return: +# None +# +# Notes: +# +# It is important to note that the function does not return anything, but it +# exports the following variables on success: +# +# __DistroRid : Non-portable rid of the target platform. +# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform. 
+# +initDistroRidGlobal() +{ + local targetOs="$1" + local targetArch="$2" + local rootfsDir="" + if [ "$#" -ge 3 ]; then + rootfsDir="$3" + fi + + if [ -n "${rootfsDir}" ]; then + # We may have a cross build. Check for the existence of the rootfsDir + if [ ! -e "${rootfsDir}" ]; then + echo "Error rootfsDir has been passed, but the location is not valid." + exit 1 + fi + fi + + __DistroRid=$(getNonPortableDistroRid "${targetOs}" "${targetArch}" "${rootfsDir}") + + if [ -z "${__PortableTargetOS:-}" ]; then + __PortableTargetOS="$targetOs" + + STRINGS="$(command -v strings || true)" + if [ -z "$STRINGS" ]; then + STRINGS="$(command -v llvm-strings || true)" + fi + + # Check for musl-based distros (e.g Alpine Linux, Void Linux). + if "${rootfsDir}/usr/bin/ldd" --version 2>&1 | grep -q musl || + ( [ -n "$STRINGS" ] && "$STRINGS" "${rootfsDir}/usr/bin/ldd" 2>&1 | grep -q musl ); then + __PortableTargetOS="linux-musl" + fi + fi + + export __DistroRid __PortableTargetOS +} diff --git a/eng/common/native/init-os-and-arch.sh b/eng/common/native/init-os-and-arch.sh new file mode 100644 index 0000000..e693617 --- /dev/null +++ b/eng/common/native/init-os-and-arch.sh @@ -0,0 +1,80 @@ +#!/usr/bin/env bash + +# Use uname to determine what the OS is. +OSName=$(uname -s | tr '[:upper:]' '[:lower:]') + +if command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then + OSName="android" +fi + +case "$OSName" in +freebsd|linux|netbsd|openbsd|sunos|android|haiku) + os="$OSName" ;; +darwin) + os=osx ;; +*) + echo "Unsupported OS $OSName detected!" + exit 1 ;; +esac + +# On Solaris, `uname -m` is discouraged, see https://docs.oracle.com/cd/E36784_01/html/E36870/uname-1.html +# and `uname -p` returns processor type (e.g. i386 on amd64). +# The appropriate tool to determine CPU is isainfo(1) https://docs.oracle.com/cd/E36784_01/html/E36870/isainfo-1.html. +if [ "$os" = "sunos" ]; then + if uname -o 2>&1 | grep -q illumos; then + os="illumos" + else + os="solaris" + fi + CPUName=$(isainfo -n) +else + # For the rest of the operating systems, use uname(1) to determine what the CPU is. + CPUName=$(uname -m) +fi + +case "$CPUName" in + arm64|aarch64) + arch=arm64 + ;; + + loongarch64) + arch=loongarch64 + ;; + + riscv64) + arch=riscv64 + ;; + + amd64|x86_64) + arch=x64 + ;; + + armv7l|armv8l) + if (NAME=""; . /etc/os-release; test "$NAME" = "Tizen"); then + arch=armel + else + arch=arm + fi + ;; + + armv6l) + arch=armv6 + ;; + + i[3-6]86) + echo "Unsupported CPU $CPUName detected, build might not succeed!" + arch=x86 + ;; + + s390x) + arch=s390x + ;; + + ppc64le) + arch=ppc64le + ;; + *) + echo "Unknown CPU $CPUName detected!" + exit 1 + ;; +esac diff --git a/eng/common/native/install-cmake-test.sh b/eng/common/native/install-cmake-test.sh new file mode 100644 index 0000000..8a5e7cf --- /dev/null +++ b/eng/common/native/install-cmake-test.sh @@ -0,0 +1,117 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +. 
$scriptroot/common-library.sh + +base_uri= +install_path= +version= +clean=false +force=false +download_retries=5 +retry_wait_time_seconds=30 + +while (($# > 0)); do + lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" + case $lowerI in + --baseuri) + base_uri=$2 + shift 2 + ;; + --installpath) + install_path=$2 + shift 2 + ;; + --version) + version=$2 + shift 2 + ;; + --clean) + clean=true + shift 1 + ;; + --force) + force=true + shift 1 + ;; + --downloadretries) + download_retries=$2 + shift 2 + ;; + --retrywaittimeseconds) + retry_wait_time_seconds=$2 + shift 2 + ;; + --help) + echo "Common settings:" + echo " --baseuri Base file directory or Url wrom which to acquire tool archives" + echo " --installpath Base directory to install native tool to" + echo " --clean Don't install the tool, just clean up the current install of the tool" + echo " --force Force install of tools even if they previously exist" + echo " --help Print help and exit" + echo "" + echo "Advanced settings:" + echo " --downloadretries Total number of retry attempts" + echo " --retrywaittimeseconds Wait time between retry attempts in seconds" + echo "" + exit 0 + ;; + esac +done + +tool_name="cmake-test" +tool_os=$(GetCurrentOS) +tool_folder="$(echo $tool_os | tr "[:upper:]" "[:lower:]")" +tool_arch="x86_64" +tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch" +tool_install_directory="$install_path/$tool_name/$version" +tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name" +shim_path="$install_path/$tool_name.sh" +uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz" + +# Clean up tool and installers +if [[ $clean = true ]]; then + echo "Cleaning $tool_install_directory" + if [[ -d $tool_install_directory ]]; then + rm -rf $tool_install_directory + fi + + echo "Cleaning $shim_path" + if [[ -f $shim_path ]]; then + rm -rf $shim_path + fi + + tool_temp_path=$(GetTempPathFileName $uri) + echo "Cleaning $tool_temp_path" + if [[ -f $tool_temp_path ]]; then + rm -rf $tool_temp_path + fi + + exit 0 +fi + +# Install tool +if [[ -f $tool_file_path ]] && [[ $force = false ]]; then + echo "$tool_name ($version) already exists, skipping install" + exit 0 +fi + +DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds + +if [[ $? != 0 ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed' + exit 1 +fi + +# Generate Shim +# Always rewrite shims so that we are referencing the expected version +NewScriptShim $shim_path $tool_file_path true + +if [[ $? != 0 ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed' + exit 1 +fi + +exit 0 diff --git a/eng/common/native/install-cmake.sh b/eng/common/native/install-cmake.sh new file mode 100644 index 0000000..de496be --- /dev/null +++ b/eng/common/native/install-cmake.sh @@ -0,0 +1,117 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +. 
$scriptroot/common-library.sh + +base_uri= +install_path= +version= +clean=false +force=false +download_retries=5 +retry_wait_time_seconds=30 + +while (($# > 0)); do + lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" + case $lowerI in + --baseuri) + base_uri=$2 + shift 2 + ;; + --installpath) + install_path=$2 + shift 2 + ;; + --version) + version=$2 + shift 2 + ;; + --clean) + clean=true + shift 1 + ;; + --force) + force=true + shift 1 + ;; + --downloadretries) + download_retries=$2 + shift 2 + ;; + --retrywaittimeseconds) + retry_wait_time_seconds=$2 + shift 2 + ;; + --help) + echo "Common settings:" + echo " --baseuri Base file directory or Url wrom which to acquire tool archives" + echo " --installpath Base directory to install native tool to" + echo " --clean Don't install the tool, just clean up the current install of the tool" + echo " --force Force install of tools even if they previously exist" + echo " --help Print help and exit" + echo "" + echo "Advanced settings:" + echo " --downloadretries Total number of retry attempts" + echo " --retrywaittimeseconds Wait time between retry attempts in seconds" + echo "" + exit 0 + ;; + esac +done + +tool_name="cmake" +tool_os=$(GetCurrentOS) +tool_folder="$(echo $tool_os | tr "[:upper:]" "[:lower:]")" +tool_arch="x86_64" +tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch" +tool_install_directory="$install_path/$tool_name/$version" +tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name" +shim_path="$install_path/$tool_name.sh" +uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz" + +# Clean up tool and installers +if [[ $clean = true ]]; then + echo "Cleaning $tool_install_directory" + if [[ -d $tool_install_directory ]]; then + rm -rf $tool_install_directory + fi + + echo "Cleaning $shim_path" + if [[ -f $shim_path ]]; then + rm -rf $shim_path + fi + + tool_temp_path=$(GetTempPathFileName $uri) + echo "Cleaning $tool_temp_path" + if [[ -f $tool_temp_path ]]; then + rm -rf $tool_temp_path + fi + + exit 0 +fi + +# Install tool +if [[ -f $tool_file_path ]] && [[ $force = false ]]; then + echo "$tool_name ($version) already exists, skipping install" + exit 0 +fi + +DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds + +if [[ $? != 0 ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed' + exit 1 +fi + +# Generate Shim +# Always rewrite shims so that we are referencing the expected version +NewScriptShim $shim_path $tool_file_path true + +if [[ $? 
!= 0 ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed' + exit 1 +fi + +exit 0 diff --git a/eng/common/native/install-tool.ps1 b/eng/common/native/install-tool.ps1 new file mode 100644 index 0000000..78f2d84 --- /dev/null +++ b/eng/common/native/install-tool.ps1 @@ -0,0 +1,132 @@ +<# +.SYNOPSIS +Install native tool + +.DESCRIPTION +Install cmake native tool from Azure blob storage + +.PARAMETER InstallPath +Base directory to install native tool to + +.PARAMETER BaseUri +Base file directory or Url from which to acquire tool archives + +.PARAMETER CommonLibraryDirectory +Path to folder containing common library modules + +.PARAMETER Force +Force install of tools even if they previously exist + +.PARAMETER Clean +Don't install the tool, just clean up the current install of the tool + +.PARAMETER DownloadRetries +Total number of retry attempts + +.PARAMETER RetryWaitTimeInSeconds +Wait time between retry attempts in seconds + +.NOTES +Returns 0 if install succeeds, 1 otherwise +#> +[CmdletBinding(PositionalBinding=$false)] +Param ( + [Parameter(Mandatory=$True)] + [string] $ToolName, + [Parameter(Mandatory=$True)] + [string] $InstallPath, + [Parameter(Mandatory=$True)] + [string] $BaseUri, + [Parameter(Mandatory=$True)] + [string] $Version, + [string] $CommonLibraryDirectory = $PSScriptRoot, + [switch] $Force = $False, + [switch] $Clean = $False, + [int] $DownloadRetries = 5, + [int] $RetryWaitTimeInSeconds = 30 +) + +. $PSScriptRoot\..\pipeline-logging-functions.ps1 + +# Import common library modules +Import-Module -Name (Join-Path $CommonLibraryDirectory "CommonLibrary.psm1") + +try { + # Define verbose switch if undefined + $Verbose = $VerbosePreference -Eq "Continue" + + $Arch = CommonLibrary\Get-MachineArchitecture + $ToolOs = "win64" + if($Arch -Eq "x32") { + $ToolOs = "win32" + } + $ToolNameMoniker = "$ToolName-$Version-$ToolOs-$Arch" + $ToolInstallDirectory = Join-Path $InstallPath "$ToolName\$Version\" + $Uri = "$BaseUri/windows/$ToolName/$ToolNameMoniker.zip" + $ShimPath = Join-Path $InstallPath "$ToolName.exe" + + if ($Clean) { + Write-Host "Cleaning $ToolInstallDirectory" + if (Test-Path $ToolInstallDirectory) { + Remove-Item $ToolInstallDirectory -Force -Recurse + } + Write-Host "Cleaning $ShimPath" + if (Test-Path $ShimPath) { + Remove-Item $ShimPath -Force + } + $ToolTempPath = CommonLibrary\Get-TempPathFilename -Path $Uri + Write-Host "Cleaning $ToolTempPath" + if (Test-Path $ToolTempPath) { + Remove-Item $ToolTempPath -Force + } + exit 0 + } + + # Install tool + if ((Test-Path $ToolInstallDirectory) -And (-Not $Force)) { + Write-Verbose "$ToolName ($Version) already exists, skipping install" + } + else { + $InstallStatus = CommonLibrary\DownloadAndExtract -Uri $Uri ` + -InstallDirectory $ToolInstallDirectory ` + -Force:$Force ` + -DownloadRetries $DownloadRetries ` + -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds ` + -Verbose:$Verbose + + if ($InstallStatus -Eq $False) { + Write-PipelineTelemetryError "Installation failed" -Category "NativeToolsetBootstrapping" + exit 1 + } + } + + $ToolFilePath = Get-ChildItem $ToolInstallDirectory -Recurse -Filter "$ToolName.exe" | % { $_.FullName } + if (@($ToolFilePath).Length -Gt 1) { + Write-Error "There are multiple copies of $ToolName in $($ToolInstallDirectory): `n$(@($ToolFilePath | out-string))" + exit 1 + } elseif (@($ToolFilePath).Length -Lt 1) { + Write-Host "$ToolName was not found in $ToolInstallDirectory." 
+ exit 1 + } + + # Generate shim + # Always rewrite shims so that we are referencing the expected version + $GenerateShimStatus = CommonLibrary\New-ScriptShim -ShimName $ToolName ` + -ShimDirectory $InstallPath ` + -ToolFilePath "$ToolFilePath" ` + -BaseUri $BaseUri ` + -Force:$Force ` + -Verbose:$Verbose + + if ($GenerateShimStatus -Eq $False) { + Write-PipelineTelemetryError "Generate shim failed" -Category "NativeToolsetBootstrapping" + return 1 + } + + exit 0 +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category "NativeToolsetBootstrapping" -Message $_ + exit 1 +} diff --git a/eng/common/performance/blazor_perf.proj b/eng/common/performance/blazor_perf.proj new file mode 100644 index 0000000..3b25359 --- /dev/null +++ b/eng/common/performance/blazor_perf.proj @@ -0,0 +1,30 @@ + + + python3 + $(HelixPreCommands);chmod +x $HELIX_WORKITEM_PAYLOAD/SOD/SizeOnDisk + + + + + %(Identity) + + + + + %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\ + $(ScenarioDirectory)blazor\ + + + $HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/ + $(ScenarioDirectory)blazor/ + + + + + $(WorkItemDirectory) + cd $(BlazorDirectory);$(Python) pre.py publish --msbuild %27/p:_TrimmerDumpDependencies=true%27 --msbuild-static AdditionalMonoLinkerOptions=%27"%24(AdditionalMonoLinkerOptions) --dump-dependencies"%27 --binlog %27./traces/blazor_publish.binlog%27 + $(Python) test.py sod --scenario-name "%(Identity)" + $(Python) post.py + + + \ No newline at end of file diff --git a/eng/common/performance/crossgen_perf.proj b/eng/common/performance/crossgen_perf.proj new file mode 100644 index 0000000..4264920 --- /dev/null +++ b/eng/common/performance/crossgen_perf.proj @@ -0,0 +1,69 @@ + + + + + %(Identity) + + + + + + py -3 + $(HelixPreCommands) + %HELIX_CORRELATION_PAYLOAD%\Core_Root + %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\ + $(ScenarioDirectory)crossgen\ + $(ScenarioDirectory)crossgen2\ + + + python3 + $(HelixPreCommands);chmod +x $HELIX_WORKITEM_PAYLOAD/startup/Startup;chmod +x $HELIX_WORKITEM_PAYLOAD/startup/perfcollect;sudo apt update + $HELIX_CORRELATION_PAYLOAD/Core_Root + $HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/ + $(ScenarioDirectory)crossgen/ + $(ScenarioDirectory)crossgen2/ + + + + + + + + + + + + + + + + $(WorkItemDirectory) + $(Python) $(CrossgenDirectory)test.py crossgen --core-root $(CoreRoot) --test-name %(Identity) + + + + + + $(WorkItemDirectory) + $(Python) $(Crossgen2Directory)test.py crossgen2 --core-root $(CoreRoot) --single %(Identity) + + + + + + + 4:00 + + + + 4:00 + + + $(WorkItemDirectory) + $(Python) $(Crossgen2Directory)test.py crossgen2 --core-root $(CoreRoot) --composite $(Crossgen2Directory)framework-r2r.dll.rsp + 1:00 + + + \ No newline at end of file diff --git a/eng/common/performance/microbenchmarks.proj b/eng/common/performance/microbenchmarks.proj new file mode 100644 index 0000000..94b6efb --- /dev/null +++ b/eng/common/performance/microbenchmarks.proj @@ -0,0 +1,144 @@ + + + + %HELIX_CORRELATION_PAYLOAD%\performance\scripts\benchmarks_ci.py --csproj %HELIX_CORRELATION_PAYLOAD%\performance\$(TargetCsproj) + --dotnet-versions %DOTNET_VERSION% --cli-source-info args --cli-branch %PERFLAB_BRANCH% --cli-commit-sha %PERFLAB_HASH% --cli-repository https://github.com/%PERFLAB_REPO% --cli-source-timestamp %PERFLAB_BUILDTIMESTAMP% + py -3 + %HELIX_CORRELATION_PAYLOAD%\Core_Root\CoreRun.exe + %HELIX_CORRELATION_PAYLOAD%\Baseline_Core_Root\CoreRun.exe + + $(HelixPreCommands);call 
%HELIX_CORRELATION_PAYLOAD%\performance\tools\machine-setup.cmd;set PYTHONPATH=%HELIX_WORKITEM_PAYLOAD%\scripts%3B%HELIX_WORKITEM_PAYLOAD% + %HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts + %HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts_Baseline + %HELIX_CORRELATION_PAYLOAD%\performance\src\tools\ResultsComparer\ResultsComparer.csproj + %HELIX_CORRELATION_PAYLOAD%\performance\tools\dotnet\$(Architecture)\dotnet.exe + %25%25 + %HELIX_WORKITEM_ROOT%\testResults.xml + + + + $HELIX_CORRELATION_PAYLOAD + $(BaseDirectory)/performance + + + + $HELIX_WORKITEM_PAYLOAD + $(BaseDirectory) + + + + $(PerformanceDirectory)/scripts/benchmarks_ci.py --csproj $(PerformanceDirectory)/$(TargetCsproj) + --dotnet-versions $DOTNET_VERSION --cli-source-info args --cli-branch $PERFLAB_BRANCH --cli-commit-sha $PERFLAB_HASH --cli-repository https://github.com/$PERFLAB_REPO --cli-source-timestamp $PERFLAB_BUILDTIMESTAMP + python3 + $(BaseDirectory)/Core_Root/corerun + $(BaseDirectory)/Baseline_Core_Root/corerun + $(HelixPreCommands);chmod +x $(PerformanceDirectory)/tools/machine-setup.sh;. $(PerformanceDirectory)/tools/machine-setup.sh + $(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts + $(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts_Baseline + $(PerformanceDirectory)/src/tools/ResultsComparer/ResultsComparer.csproj + $(PerformanceDirectory)/tools/dotnet/$(Architecture)/dotnet + %25 + $HELIX_WORKITEM_ROOT/testResults.xml + + + + $(CliArguments) --wasm + + + + --corerun %HELIX_CORRELATION_PAYLOAD%\dotnet-mono\shared\Microsoft.NETCore.App\6.0.0\corerun.exe + + + --corerun $(BaseDirectory)/dotnet-mono/shared/Microsoft.NETCore.App/6.0.0/corerun + + + + --corerun $(CoreRun) + + + + --corerun $(BaselineCoreRun) + + + + $(Python) $(WorkItemCommand) --incremental no --architecture $(Architecture) -f $(_Framework) $(PerfLabArguments) + + + + $(WorkItemCommand) $(CliArguments) + + + + 2:30 + 0:15 + + + + + %(Identity) + + + + + 30 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + false + + + + + + $(WorkItemDirectory) + $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)" + $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)" + $(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults);$(FinalCommand) + $(WorkItemTimeout) + + + + + + $(WorkItemDirectory) + $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument)" + $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument)" + $(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults) + 4:00 + + + \ No newline at end of file diff --git a/eng/common/performance/perfhelixpublish.proj b/eng/common/performance/perfhelixpublish.proj new file mode 100644 index 0000000..cf5941e --- /dev/null +++ 
b/eng/common/performance/perfhelixpublish.proj @@ -0,0 +1,121 @@ + + + + %HELIX_CORRELATION_PAYLOAD%\performance\scripts\benchmarks_ci.py --csproj %HELIX_CORRELATION_PAYLOAD%\performance\$(TargetCsproj) + --dotnet-versions %DOTNET_VERSION% --cli-source-info args --cli-branch %PERFLAB_BRANCH% --cli-commit-sha %PERFLAB_HASH% --cli-repository https://github.com/%PERFLAB_REPO% --cli-source-timestamp %PERFLAB_BUILDTIMESTAMP% + py -3 + %HELIX_CORRELATION_PAYLOAD%\Core_Root\CoreRun.exe + %HELIX_CORRELATION_PAYLOAD%\Baseline_Core_Root\CoreRun.exe + $(HelixPreCommands);call %HELIX_CORRELATION_PAYLOAD%\performance\tools\machine-setup.cmd;set PYTHONPATH=%HELIX_WORKITEM_PAYLOAD%\scripts%3B%HELIX_WORKITEM_PAYLOAD% + %HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts + %HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts_Baseline + %HELIX_CORRELATION_PAYLOAD%\performance\src\tools\ResultsComparer\ResultsComparer.csproj + %HELIX_CORRELATION_PAYLOAD%\performance\tools\dotnet\$(Architecture)\dotnet.exe + %25%25 + %HELIX_WORKITEM_ROOT%\testResults.xml + + + + $HELIX_CORRELATION_PAYLOAD + $(BaseDirectory)/performance + + + + $HELIX_WORKITEM_PAYLOAD + $(BaseDirectory) + + + + $(PerformanceDirectory)/scripts/benchmarks_ci.py --csproj $(PerformanceDirectory)/$(TargetCsproj) + --dotnet-versions $DOTNET_VERSION --cli-source-info args --cli-branch $PERFLAB_BRANCH --cli-commit-sha $PERFLAB_HASH --cli-repository https://github.com/$PERFLAB_REPO --cli-source-timestamp $PERFLAB_BUILDTIMESTAMP + python3 + $(BaseDirectory)/Core_Root/corerun + $(BaseDirectory)/Baseline_Core_Root/corerun + $(HelixPreCommands);chmod +x $(PerformanceDirectory)/tools/machine-setup.sh;. $(PerformanceDirectory)/tools/machine-setup.sh + $(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts + $(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts_Baseline + $(PerformanceDirectory)/src/tools/ResultsComparer/ResultsComparer.csproj + $(PerformanceDirectory)/tools/dotnet/$(Architecture)/dotnet + %25 + $HELIX_WORKITEM_ROOT/testResults.xml + + + + --corerun $(CoreRun) + + + + --corerun $(BaselineCoreRun) + + + + $(Python) $(WorkItemCommand) --incremental no --architecture $(Architecture) -f $(_Framework) $(PerfLabArguments) + + + + $(WorkItemCommand) $(CliArguments) + + + + + %(Identity) + + + + + 5 + + + + + + + + + + + false + + + + + + $(WorkItemDirectory) + $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)" + $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)" + $(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults);$(FinalCommand) + 4:00 + + + + + + $(WorkItemDirectory) + $(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument)" + $(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument)" + $(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 
2$(Percent) --xml $(XMLResults) + 4:00 + + + + + + $(WorkItemDirectory)\ScenarioCorrelation + $(Python) %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\crossgen\test.py crossgen --test-name System.Private.Xml.dll --core-root %HELIX_CORRELATION_PAYLOAD%\Core_Root + + + $(WorkItemDirectory)\ScenarioCorrelation + $(Python) %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\crossgen\test.py crossgen --test-name System.Linq.Expressions.dll --core-root %HELIX_CORRELATION_PAYLOAD%\Core_Root + + + $(WorkItemDirectory)\ScenarioCorrelation + $(Python) %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\crossgen\test.py crossgen --test-name Microsoft.CodeAnalysis.VisualBasic.dll --core-root %HELIX_CORRELATION_PAYLOAD%\Core_Root + + + $(WorkItemDirectory)\ScenarioCorrelation + $(Python) %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\crossgen\test.py crossgen --test-name Microsoft.CodeAnalysis.CSharp.dll --core-root %HELIX_CORRELATION_PAYLOAD%\Core_Root + + + \ No newline at end of file diff --git a/eng/common/performance/performance-setup.ps1 b/eng/common/performance/performance-setup.ps1 new file mode 100644 index 0000000..656c0bd --- /dev/null +++ b/eng/common/performance/performance-setup.ps1 @@ -0,0 +1,147 @@ +Param( + [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, + [string] $CoreRootDirectory, + [string] $BaselineCoreRootDirectory, + [string] $Architecture="x64", + [string] $Framework="net5.0", + [string] $CompilationMode="Tiered", + [string] $Repository=$env:BUILD_REPOSITORY_NAME, + [string] $Branch=$env:BUILD_SOURCEBRANCH, + [string] $CommitSha=$env:BUILD_SOURCEVERSION, + [string] $BuildNumber=$env:BUILD_BUILDNUMBER, + [string] $RunCategories="Libraries Runtime", + [string] $Csproj="src\benchmarks\micro\MicroBenchmarks.csproj", + [string] $Kind="micro", + [switch] $LLVM, + [switch] $MonoInterpreter, + [switch] $MonoAOT, + [switch] $Internal, + [switch] $Compare, + [string] $MonoDotnet="", + [string] $Configurations="CompilationMode=$CompilationMode RunKind=$Kind" +) + +$RunFromPerformanceRepo = ($Repository -eq "dotnet/performance") -or ($Repository -eq "dotnet-performance") +$UseCoreRun = ($CoreRootDirectory -ne [string]::Empty) +$UseBaselineCoreRun = ($BaselineCoreRootDirectory -ne [string]::Empty) + +$PayloadDirectory = (Join-Path $SourceDirectory "Payload") +$PerformanceDirectory = (Join-Path $PayloadDirectory "performance") +$WorkItemDirectory = (Join-Path $SourceDirectory "workitem") +$ExtraBenchmarkDotNetArguments = "--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true" +$Creator = $env:BUILD_DEFINITIONNAME +$PerfLabArguments = "" +$HelixSourcePrefix = "pr" + +$Queue = "Windows.10.Amd64.ClientRS4.DevEx.15.8.Open" + +# TODO: Implement a better logic to determine if Framework is .NET Core or >= .NET 5. 
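+# The checks below only special-case the frameworks currently in use. A sketch of the
+# more general test the TODO above asks for (assumption: anything parsing as version 5
+# or higher after the "net" prefix counts as modern .NET; not wired in here):
+#   $parsedVersion = $null
+#   $isModernNet = $Framework.StartsWith("netcoreapp") -or
+#       ([System.Version]::TryParse(($Framework -replace '^net', ''), [ref]$parsedVersion) -and $parsedVersion.Major -ge 5)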
+if ($Framework.StartsWith("netcoreapp") -or ($Framework -eq "net5.0")) { + $Queue = "Windows.10.Amd64.ClientRS5.Open" +} + +if ($Compare) { + $Queue = "Windows.10.Amd64.19H1.Tiger.Perf.Open" + $PerfLabArguments = "" + $ExtraBenchmarkDotNetArguments = "" +} + +if ($Internal) { + $Queue = "Windows.10.Amd64.19H1.Tiger.Perf" + $PerfLabArguments = "--upload-to-perflab-container" + $ExtraBenchmarkDotNetArguments = "" + $Creator = "" + $HelixSourcePrefix = "official" +} + +if($MonoInterpreter) +{ + $ExtraBenchmarkDotNetArguments = "--category-exclusion-filter NoInterpreter" +} + +if($MonoDotnet -ne "") +{ + $Configurations += " LLVM=$LLVM MonoInterpreter=$MonoInterpreter MonoAOT=$MonoAOT" + if($ExtraBenchmarkDotNetArguments -eq "") + { + #FIX ME: We need to block these tests as they don't run on mono for now + $ExtraBenchmarkDotNetArguments = "--exclusion-filter *Perf_Image* *Perf_NamedPipeStream*" + } + else + { + #FIX ME: We need to block these tests as they don't run on mono for now + $ExtraBenchmarkDotNetArguments += " --exclusion-filter *Perf_Image* *Perf_NamedPipeStream*" + } +} + +# FIX ME: This is a workaround until we get this from the actual pipeline +$CommonSetupArguments="--channel master --queue $Queue --build-number $BuildNumber --build-configs $Configurations --architecture $Architecture" +$SetupArguments = "--repository https://github.com/$Repository --branch $Branch --get-perf-hash --commit-sha $CommitSha $CommonSetupArguments" + + +#This grabs the LKG version number of dotnet and passes it to our scripts +$VersionJSON = Get-Content global.json | ConvertFrom-Json +$DotNetVersion = $VersionJSON.tools.dotnet +$SetupArguments = "--dotnet-versions $DotNetVersion $SetupArguments" + + +if ($RunFromPerformanceRepo) { + $SetupArguments = "--perf-hash $CommitSha $CommonSetupArguments" + + robocopy $SourceDirectory $PerformanceDirectory /E /XD $PayloadDirectory $SourceDirectory\artifacts $SourceDirectory\.git +} +else { + git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $PerformanceDirectory +} + +if($MonoDotnet -ne "") +{ + $UsingMono = "true" + $MonoDotnetPath = (Join-Path $PayloadDirectory "dotnet-mono") + Move-Item -Path $MonoDotnet -Destination $MonoDotnetPath +} + +if ($UseCoreRun) { + $NewCoreRoot = (Join-Path $PayloadDirectory "Core_Root") + Move-Item -Path $CoreRootDirectory -Destination $NewCoreRoot +} +if ($UseBaselineCoreRun) { + $NewBaselineCoreRoot = (Join-Path $PayloadDirectory "Baseline_Core_Root") + Move-Item -Path $BaselineCoreRootDirectory -Destination $NewBaselineCoreRoot +} + +$DocsDir = (Join-Path $PerformanceDirectory "docs") +robocopy $DocsDir $WorkItemDirectory + +# Set variables that we will need to have in future steps +$ci = $true + +. 
"$PSScriptRoot\..\pipeline-logging-functions.ps1" + +# Directories +Write-PipelineSetVariable -Name 'PayloadDirectory' -Value "$PayloadDirectory" -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'PerformanceDirectory' -Value "$PerformanceDirectory" -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'WorkItemDirectory' -Value "$WorkItemDirectory" -IsMultiJobVariable $false + +# Script Arguments +Write-PipelineSetVariable -Name 'Python' -Value "py -3" -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'ExtraBenchmarkDotNetArguments' -Value "$ExtraBenchmarkDotNetArguments" -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'SetupArguments' -Value "$SetupArguments" -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'PerfLabArguments' -Value "$PerfLabArguments" -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'BDNCategories' -Value "$RunCategories" -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'TargetCsproj' -Value "$Csproj" -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'Kind' -Value "$Kind" -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'Architecture' -Value "$Architecture" -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'UseCoreRun' -Value "$UseCoreRun" -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'UseBaselineCoreRun' -Value "$UseBaselineCoreRun" -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'RunFromPerfRepo' -Value "$RunFromPerformanceRepo" -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'Compare' -Value "$Compare" -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'MonoDotnet' -Value "$UsingMono" -IsMultiJobVariable $false + +# Helix Arguments +Write-PipelineSetVariable -Name 'Creator' -Value "$Creator" -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'Queue' -Value "$Queue" -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'HelixSourcePrefix' -Value "$HelixSourcePrefix" -IsMultiJobVariable $false +Write-PipelineSetVariable -Name '_BuildConfig' -Value "$Architecture.$Kind.$Framework" -IsMultiJobVariable $false + +exit 0 \ No newline at end of file diff --git a/eng/common/performance/performance-setup.sh b/eng/common/performance/performance-setup.sh new file mode 100644 index 0000000..99d1b7b --- /dev/null +++ b/eng/common/performance/performance-setup.sh @@ -0,0 +1,289 @@ +#!/usr/bin/env bash + +source_directory=$BUILD_SOURCESDIRECTORY +core_root_directory= +baseline_core_root_directory= +architecture=x64 +framework=net5.0 +compilation_mode=tiered +repository=$BUILD_REPOSITORY_NAME +branch=$BUILD_SOURCEBRANCH +commit_sha=$BUILD_SOURCEVERSION +build_number=$BUILD_BUILDNUMBER +internal=false +compare=false +mono_dotnet= +kind="micro" +llvm=false +monointerpreter=false +monoaot=false +run_categories="Libraries Runtime" +csproj="src\benchmarks\micro\MicroBenchmarks.csproj" +configurations="CompliationMode=$compilation_mode RunKind=$kind" +run_from_perf_repo=false +use_core_run=true +use_baseline_core_run=true +using_mono=false +wasm_runtime_loc= +using_wasm=false +use_latest_dotnet=false + +while (($# > 0)); do + lowerI="$(echo $1 | awk '{print tolower($0)}')" + case $lowerI in + --sourcedirectory) + source_directory=$2 + shift 2 + ;; + --corerootdirectory) + core_root_directory=$2 + shift 2 + ;; + --baselinecorerootdirectory) + baseline_core_root_directory=$2 + shift 2 + ;; + --architecture) + architecture=$2 + shift 2 + ;; + --framework) + framework=$2 + shift 2 + ;; + --compilationmode) + compilation_mode=$2 + 
shift 2 + ;; + --repository) + repository=$2 + shift 2 + ;; + --branch) + branch=$2 + shift 2 + ;; + --commitsha) + commit_sha=$2 + shift 2 + ;; + --buildnumber) + build_number=$2 + shift 2 + ;; + --kind) + kind=$2 + configurations="CompilationMode=$compilation_mode RunKind=$kind" + shift 2 + ;; + --runcategories) + run_categories=$2 + shift 2 + ;; + --csproj) + csproj=$2 + shift 2 + ;; + --internal) + internal=true + shift 1 + ;; + --llvm) + llvm=true + shift 1 + ;; + --monointerpreter) + monointerpreter=true + shift 1 + ;; + --monoaot) + monoaot=true + shift 1 + ;; + --monodotnet) + mono_dotnet=$2 + shift 2 + ;; + --wasm) + wasm_runtime_loc=$2 + shift 2 + ;; + --compare) + compare=true + shift 1 + ;; + --configurations) + configurations=$2 + shift 2 + ;; + --latestdotnet) + use_latest_dotnet=true + shift 1 + ;; + *) + echo "Common settings:" + echo " --corerootdirectory Directory where Core_Root exists, if running perf testing with --corerun" + echo " --architecture Architecture of the testing being run" + echo " --configurations List of key=value pairs that will be passed to perf testing infrastructure." + echo " ex: --configurations \"CompilationMode=Tiered OptimzationLevel=PGO\"" + echo " --help Print help and exit" + echo "" + echo "Advanced settings:" + echo " --framework The framework to run, if not running in master" + echo " --compliationmode The compilation mode if not passing --configurations" + echo " --sourcedirectory The directory of the sources. Defaults to env:BUILD_SOURCESDIRECTORY" + echo " --repository The name of the repository in the / format. Defaults to env:BUILD_REPOSITORY_NAME" + echo " --branch The name of the branch. Defaults to env:BUILD_SOURCEBRANCH" + echo " --commitsha The commit sha1 to run against. Defaults to env:BUILD_SOURCEVERSION" + echo " --buildnumber The build number currently running. Defaults to env:BUILD_BUILDNUMBER" + echo " --csproj The relative path to the benchmark csproj whose tests should be run. Defaults to src\benchmarks\micro\MicroBenchmarks.csproj" + echo " --kind Related to csproj. The kind of benchmarks that should be run. Defaults to micro" + echo " --runcategories Related to csproj. Categories of benchmarks to run. Defaults to \"coreclr corefx\"" + echo " --internal If the benchmarks are running as an official job." + echo " --monodotnet Pass the path to the mono dotnet for mono performance testing." + echo " --wasm Path to the unpacked wasm runtime pack." + echo " --latestdotnet --dotnet-versions will not be specified. 
--dotnet-versions defaults to LKG version in global.json " + echo "" + exit 0 + ;; + esac +done + +if [ "$repository" == "dotnet/performance" ] || [ "$repository" == "dotnet-performance" ]; then + run_from_perf_repo=true +fi + +if [ -z "$configurations" ]; then + configurations="CompilationMode=$compilation_mode" +fi + +if [ -z "$core_root_directory" ]; then + use_core_run=false +fi + +if [ -z "$baseline_core_root_directory" ]; then + use_baseline_core_run=false +fi + +payload_directory=$source_directory/Payload +performance_directory=$payload_directory/performance +workitem_directory=$source_directory/workitem +extra_benchmark_dotnet_arguments="--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true" +perflab_arguments= +queue=Ubuntu.1804.Amd64.Open +creator=$BUILD_DEFINITIONNAME +helix_source_prefix="pr" + +if [[ "$compare" == true ]]; then + extra_benchmark_dotnet_arguments= + perflab_arguments= + + # No open queues for arm64 + if [[ "$architecture" = "arm64" ]]; then + echo "Compare not available for arm64" + exit 1 + fi + + queue=Ubuntu.1804.Amd64.Tiger.Perf.Open +fi + +if [[ "$internal" == true ]]; then + perflab_arguments="--upload-to-perflab-container" + helix_source_prefix="official" + creator= + extra_benchmark_dotnet_arguments= + + if [[ "$architecture" = "arm64" ]]; then + queue=Ubuntu.1804.Arm64.Perf + else + queue=Ubuntu.1804.Amd64.Tiger.Perf + fi +fi + +if [[ "$mono_dotnet" != "" ]] && [[ "$monointerpreter" == "false" ]]; then + extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoMono" +fi + +if [[ "$wasm_runtime_loc" != "" ]]; then + configurations="CompilationMode=wasm RunKind=$kind" + extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoInterpreter NoWASM NoMono" +fi + +if [[ "$mono_dotnet" != "" ]] && [[ "$monointerpreter" == "true" ]]; then + configurations="$configurations LLVM=$llvm MonoInterpreter=$monointerpreter MonoAOT=$monoaot" + extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoInterpreter NoMono" +fi + +common_setup_arguments="--channel master --queue $queue --build-number $build_number --build-configs $configurations --architecture $architecture" +setup_arguments="--repository https://github.com/$repository --branch $branch --get-perf-hash --commit-sha $commit_sha $common_setup_arguments" + + +if [[ "$use_latest_dotnet" = false ]]; then + # Get the tools section from the global.json. 
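+  # The python one-liner below reads the "tools.dotnet" entry, so a global.json shaped
+  # like { "tools": { "dotnet": "5.0.100" } } (version shown is illustrative) yields "5.0.100".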
+ # This grabs the LKG version number of dotnet and passes it to our scripts + dotnet_version=`cat global.json | python3 -c 'import json,sys;obj=json.load(sys.stdin);print(obj["tools"]["dotnet"])'` + setup_arguments="--dotnet-versions $dotnet_version $setup_arguments" +fi + +if [[ "$run_from_perf_repo" = true ]]; then + payload_directory= + workitem_directory=$source_directory + performance_directory=$workitem_directory + setup_arguments="--perf-hash $commit_sha $common_setup_arguments" +else + git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $performance_directory + + docs_directory=$performance_directory/docs + mv $docs_directory $workitem_directory +fi + +if [[ "$wasm_runtime_loc" != "" ]]; then + using_wasm=true + wasm_dotnet_path=$payload_directory/dotnet-wasm + mv $wasm_runtime_loc $wasm_dotnet_path + extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --wasmMainJS \$HELIX_CORRELATION_PAYLOAD/dotnet-wasm/runtime-test.js --wasmEngine /home/helixbot/.jsvu/v8 --customRuntimePack \$HELIX_CORRELATION_PAYLOAD/dotnet-wasm" +fi + +if [[ "$mono_dotnet" != "" ]]; then + using_mono=true + mono_dotnet_path=$payload_directory/dotnet-mono + mv $mono_dotnet $mono_dotnet_path +fi + +if [[ "$use_core_run" = true ]]; then + new_core_root=$payload_directory/Core_Root + mv $core_root_directory $new_core_root +fi + +if [[ "$use_baseline_core_run" = true ]]; then + new_baseline_core_root=$payload_directory/Baseline_Core_Root + mv $baseline_core_root_directory $new_baseline_core_root +fi + +ci=true + +_script_dir=$(pwd)/eng/common +. "$_script_dir/pipeline-logging-functions.sh" + +# Make sure all of our variables are available for future steps +Write-PipelineSetVariable -name "UseCoreRun" -value "$use_core_run" -is_multi_job_variable false +Write-PipelineSetVariable -name "UseBaselineCoreRun" -value "$use_baseline_core_run" -is_multi_job_variable false +Write-PipelineSetVariable -name "Architecture" -value "$architecture" -is_multi_job_variable false +Write-PipelineSetVariable -name "PayloadDirectory" -value "$payload_directory" -is_multi_job_variable false +Write-PipelineSetVariable -name "PerformanceDirectory" -value "$performance_directory" -is_multi_job_variable false +Write-PipelineSetVariable -name "WorkItemDirectory" -value "$workitem_directory" -is_multi_job_variable false +Write-PipelineSetVariable -name "Queue" -value "$queue" -is_multi_job_variable false +Write-PipelineSetVariable -name "SetupArguments" -value "$setup_arguments" -is_multi_job_variable false +Write-PipelineSetVariable -name "Python" -value "python3" -is_multi_job_variable false +Write-PipelineSetVariable -name "PerfLabArguments" -value "$perflab_arguments" -is_multi_job_variable false +Write-PipelineSetVariable -name "ExtraBenchmarkDotNetArguments" -value "$extra_benchmark_dotnet_arguments" -is_multi_job_variable false +Write-PipelineSetVariable -name "BDNCategories" -value "$run_categories" -is_multi_job_variable false +Write-PipelineSetVariable -name "TargetCsproj" -value "$csproj" -is_multi_job_variable false +Write-PipelineSetVariable -name "RunFromPerfRepo" -value "$run_from_perf_repo" -is_multi_job_variable false +Write-PipelineSetVariable -name "Creator" -value "$creator" -is_multi_job_variable false +Write-PipelineSetVariable -name "HelixSourcePrefix" -value "$helix_source_prefix" -is_multi_job_variable false +Write-PipelineSetVariable -name "Kind" -value "$kind" -is_multi_job_variable false +Write-PipelineSetVariable -name "_BuildConfig" -value 
"$architecture.$kind.$framework" -is_multi_job_variable false +Write-PipelineSetVariable -name "Compare" -value "$compare" -is_multi_job_variable false +Write-PipelineSetVariable -name "MonoDotnet" -value "$using_mono" -is_multi_job_variable false +Write-PipelineSetVariable -name "WasmDotnet" -value "$using_wasm" -is_multi_job_variable false diff --git a/eng/common/pipeline-logging-functions.ps1 b/eng/common/pipeline-logging-functions.ps1 new file mode 100644 index 0000000..8e422c5 --- /dev/null +++ b/eng/common/pipeline-logging-functions.ps1 @@ -0,0 +1,260 @@ +# Source for this file was taken from https://github.com/microsoft/azure-pipelines-task-lib/blob/11c9439d4af17e6475d9fe058e6b2e03914d17e6/powershell/VstsTaskSdk/LoggingCommandFunctions.ps1 and modified. + +# NOTE: You should not be calling these method directly as they are likely to change. Instead you should be calling the Write-Pipeline* functions defined in tools.ps1 + +$script:loggingCommandPrefix = '##vso[' +$script:loggingCommandEscapeMappings = @( # TODO: WHAT ABOUT "="? WHAT ABOUT "%"? + New-Object psobject -Property @{ Token = ';' ; Replacement = '%3B' } + New-Object psobject -Property @{ Token = "`r" ; Replacement = '%0D' } + New-Object psobject -Property @{ Token = "`n" ; Replacement = '%0A' } + New-Object psobject -Property @{ Token = "]" ; Replacement = '%5D' } +) +# TODO: BUG: Escape % ??? +# TODO: Add test to verify don't need to escape "=". + +# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set +function Write-PipelineTelemetryError { + [CmdletBinding()] + param( + [Parameter(Mandatory = $true)] + [string]$Category, + [Parameter(Mandatory = $true)] + [string]$Message, + [Parameter(Mandatory = $false)] + [string]$Type = 'error', + [string]$ErrCode, + [string]$SourcePath, + [string]$LineNumber, + [string]$ColumnNumber, + [switch]$AsOutput, + [switch]$Force) + + $PSBoundParameters.Remove('Category') | Out-Null + + if ($Force -Or ((Test-Path variable:ci) -And $ci)) { + $Message = "(NETCORE_ENGINEERING_TELEMETRY=$Category) $Message" + } + $PSBoundParameters.Remove('Message') | Out-Null + $PSBoundParameters.Add('Message', $Message) + Write-PipelineTaskError @PSBoundParameters +} + +# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set +function Write-PipelineTaskError { + [CmdletBinding()] + param( + [Parameter(Mandatory = $true)] + [string]$Message, + [Parameter(Mandatory = $false)] + [string]$Type = 'error', + [string]$ErrCode, + [string]$SourcePath, + [string]$LineNumber, + [string]$ColumnNumber, + [switch]$AsOutput, + [switch]$Force + ) + + if (!$Force -And (-Not (Test-Path variable:ci) -Or !$ci)) { + if ($Type -eq 'error') { + Write-Host $Message -ForegroundColor Red + return + } + elseif ($Type -eq 'warning') { + Write-Host $Message -ForegroundColor Yellow + return + } + } + + if (($Type -ne 'error') -and ($Type -ne 'warning')) { + Write-Host $Message + return + } + $PSBoundParameters.Remove('Force') | Out-Null + if (-not $PSBoundParameters.ContainsKey('Type')) { + $PSBoundParameters.Add('Type', 'error') + } + Write-LogIssue @PSBoundParameters +} + +function Write-PipelineSetVariable { + [CmdletBinding()] + param( + [Parameter(Mandatory = $true)] + [string]$Name, + [string]$Value, + [switch]$Secret, + [switch]$AsOutput, + [bool]$IsMultiJobVariable = $true) + + if ((Test-Path variable:ci) -And $ci) { + Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $Value -Properties @{ + 'variable' = $Name + 'isSecret' = $Secret + 'isOutput' = 
$IsMultiJobVariable + } -AsOutput:$AsOutput + } +} + +function Write-PipelinePrependPath { + [CmdletBinding()] + param( + [Parameter(Mandatory = $true)] + [string]$Path, + [switch]$AsOutput) + + if ((Test-Path variable:ci) -And $ci) { + Write-LoggingCommand -Area 'task' -Event 'prependpath' -Data $Path -AsOutput:$AsOutput + } +} + +function Write-PipelineSetResult { + [CmdletBinding()] + param( + [ValidateSet("Succeeded", "SucceededWithIssues", "Failed", "Cancelled", "Skipped")] + [Parameter(Mandatory = $true)] + [string]$Result, + [string]$Message) + if ((Test-Path variable:ci) -And $ci) { + Write-LoggingCommand -Area 'task' -Event 'complete' -Data $Message -Properties @{ + 'result' = $Result + } + } +} + +<######################################## +# Private functions. +########################################> +function Format-LoggingCommandData { + [CmdletBinding()] + param([string]$Value, [switch]$Reverse) + + if (!$Value) { + return '' + } + + if (!$Reverse) { + foreach ($mapping in $script:loggingCommandEscapeMappings) { + $Value = $Value.Replace($mapping.Token, $mapping.Replacement) + } + } + else { + for ($i = $script:loggingCommandEscapeMappings.Length - 1 ; $i -ge 0 ; $i--) { + $mapping = $script:loggingCommandEscapeMappings[$i] + $Value = $Value.Replace($mapping.Replacement, $mapping.Token) + } + } + + return $Value +} + +function Format-LoggingCommand { + [CmdletBinding()] + param( + [Parameter(Mandatory = $true)] + [string]$Area, + [Parameter(Mandatory = $true)] + [string]$Event, + [string]$Data, + [hashtable]$Properties) + + # Append the preamble. + [System.Text.StringBuilder]$sb = New-Object -TypeName System.Text.StringBuilder + $null = $sb.Append($script:loggingCommandPrefix).Append($Area).Append('.').Append($Event) + + # Append the properties. + if ($Properties) { + $first = $true + foreach ($key in $Properties.Keys) { + [string]$value = Format-LoggingCommandData $Properties[$key] + if ($value) { + if ($first) { + $null = $sb.Append(' ') + $first = $false + } + else { + $null = $sb.Append(';') + } + + $null = $sb.Append("$key=$value") + } + } + } + + # Append the tail and output the value. 
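+    # Illustrative output for -Area 'task' -Event 'setvariable' with one property
+    # ('variable' = 'foo') and data 'bar':  ##vso[task.setvariable variable=foo]bar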
+ $Data = Format-LoggingCommandData $Data + $sb.Append(']').Append($Data).ToString() +} + +function Write-LoggingCommand { + [CmdletBinding(DefaultParameterSetName = 'Parameters')] + param( + [Parameter(Mandatory = $true, ParameterSetName = 'Parameters')] + [string]$Area, + [Parameter(Mandatory = $true, ParameterSetName = 'Parameters')] + [string]$Event, + [Parameter(ParameterSetName = 'Parameters')] + [string]$Data, + [Parameter(ParameterSetName = 'Parameters')] + [hashtable]$Properties, + [Parameter(Mandatory = $true, ParameterSetName = 'Object')] + $Command, + [switch]$AsOutput) + + if ($PSCmdlet.ParameterSetName -eq 'Object') { + Write-LoggingCommand -Area $Command.Area -Event $Command.Event -Data $Command.Data -Properties $Command.Properties -AsOutput:$AsOutput + return + } + + $command = Format-LoggingCommand -Area $Area -Event $Event -Data $Data -Properties $Properties + if ($AsOutput) { + $command + } + else { + Write-Host $command + } +} + +function Write-LogIssue { + [CmdletBinding()] + param( + [ValidateSet('warning', 'error')] + [Parameter(Mandatory = $true)] + [string]$Type, + [string]$Message, + [string]$ErrCode, + [string]$SourcePath, + [string]$LineNumber, + [string]$ColumnNumber, + [switch]$AsOutput) + + $command = Format-LoggingCommand -Area 'task' -Event 'logissue' -Data $Message -Properties @{ + 'type' = $Type + 'code' = $ErrCode + 'sourcepath' = $SourcePath + 'linenumber' = $LineNumber + 'columnnumber' = $ColumnNumber + } + if ($AsOutput) { + return $command + } + + if ($Type -eq 'error') { + $foregroundColor = $host.PrivateData.ErrorForegroundColor + $backgroundColor = $host.PrivateData.ErrorBackgroundColor + if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) { + $foregroundColor = [System.ConsoleColor]::Red + $backgroundColor = [System.ConsoleColor]::Black + } + } + else { + $foregroundColor = $host.PrivateData.WarningForegroundColor + $backgroundColor = $host.PrivateData.WarningBackgroundColor + if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) { + $foregroundColor = [System.ConsoleColor]::Yellow + $backgroundColor = [System.ConsoleColor]::Black + } + } + + Write-Host $command -ForegroundColor $foregroundColor -BackgroundColor $backgroundColor +} diff --git a/eng/common/pipeline-logging-functions.sh b/eng/common/pipeline-logging-functions.sh new file mode 100644 index 0000000..6a0b225 --- /dev/null +++ b/eng/common/pipeline-logging-functions.sh @@ -0,0 +1,206 @@ +#!/usr/bin/env bash + +function Write-PipelineTelemetryError { + local telemetry_category='' + local force=false + local function_args=() + local message='' + while [[ $# -gt 0 ]]; do + opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" + case "$opt" in + -category|-c) + telemetry_category=$2 + shift + ;; + -force|-f) + force=true + ;; + -*) + function_args+=("$1 $2") + shift + ;; + *) + message=$* + ;; + esac + shift + done + + if [[ $force != true ]] && [[ "$ci" != true ]]; then + echo "$message" >&2 + return + fi + + if [[ $force == true ]]; then + function_args+=("-force") + fi + message="(NETCORE_ENGINEERING_TELEMETRY=$telemetry_category) $message" + function_args+=("$message") + Write-PipelineTaskError ${function_args[@]} +} + +function Write-PipelineTaskError { + local message_type="error" + local sourcepath='' + local linenumber='' + local columnnumber='' + local error_code='' + local force=false + + while [[ $# -gt 0 ]]; do + opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" + case "$opt" 
in + -type|-t) + message_type=$2 + shift + ;; + -sourcepath|-s) + sourcepath=$2 + shift + ;; + -linenumber|-ln) + linenumber=$2 + shift + ;; + -columnnumber|-cn) + columnnumber=$2 + shift + ;; + -errcode|-e) + error_code=$2 + shift + ;; + -force|-f) + force=true + ;; + *) + break + ;; + esac + + shift + done + + if [[ $force != true ]] && [[ "$ci" != true ]]; then + echo "$@" >&2 + return + fi + + local message="##vso[task.logissue" + + message="$message type=$message_type" + + if [ -n "$sourcepath" ]; then + message="$message;sourcepath=$sourcepath" + fi + + if [ -n "$linenumber" ]; then + message="$message;linenumber=$linenumber" + fi + + if [ -n "$columnnumber" ]; then + message="$message;columnnumber=$columnnumber" + fi + + if [ -n "$error_code" ]; then + message="$message;code=$error_code" + fi + + message="$message]$*" + echo "$message" +} + +function Write-PipelineSetVariable { + if [[ "$ci" != true ]]; then + return + fi + + local name='' + local value='' + local secret=false + local as_output=false + local is_multi_job_variable=true + + while [[ $# -gt 0 ]]; do + opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" + case "$opt" in + -name|-n) + name=$2 + shift + ;; + -value|-v) + value=$2 + shift + ;; + -secret|-s) + secret=true + ;; + -as_output|-a) + as_output=true + ;; + -is_multi_job_variable|-i) + is_multi_job_variable=$2 + shift + ;; + esac + shift + done + + value=${value/;/%3B} + value=${value/\\r/%0D} + value=${value/\\n/%0A} + value=${value/]/%5D} + + local message="##vso[task.setvariable variable=$name;isSecret=$secret;isOutput=$is_multi_job_variable]$value" + + if [[ "$as_output" == true ]]; then + $message + else + echo "$message" + fi +} + +function Write-PipelinePrependPath { + local prepend_path='' + + while [[ $# -gt 0 ]]; do + opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" + case "$opt" in + -path|-p) + prepend_path=$2 + shift + ;; + esac + shift + done + + export PATH="$prepend_path:$PATH" + + if [[ "$ci" == true ]]; then + echo "##vso[task.prependpath]$prepend_path" + fi +} + +function Write-PipelineSetResult { + local result='' + local message='' + + while [[ $# -gt 0 ]]; do + opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" + case "$opt" in + -result|-r) + result=$2 + shift + ;; + -message|-m) + message=$2 + shift + ;; + esac + shift + done + + if [[ "$ci" == true ]]; then + echo "##vso[task.complete result=$result;]$message" + fi +} diff --git a/eng/common/post-build/add-build-to-channel.ps1 b/eng/common/post-build/add-build-to-channel.ps1 new file mode 100644 index 0000000..de2d957 --- /dev/null +++ b/eng/common/post-build/add-build-to-channel.ps1 @@ -0,0 +1,48 @@ +param( + [Parameter(Mandatory=$true)][int] $BuildId, + [Parameter(Mandatory=$true)][int] $ChannelId, + [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken, + [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com', + [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16' +) + +try { + . $PSScriptRoot\post-build-utils.ps1 + + # Check that the channel we are going to promote the build to exist + $channelInfo = Get-MaestroChannel -ChannelId $ChannelId + + if (!$channelInfo) { + Write-PipelineTelemetryCategory -Category 'PromoteBuild' -Message "Channel with BAR ID $ChannelId was not found in BAR!" 
+ ExitWithExitCode 1 + } + + # Get info about which channel(s) the build has already been promoted to + $buildInfo = Get-MaestroBuild -BuildId $BuildId + + if (!$buildInfo) { + Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "Build with BAR ID $BuildId was not found in BAR!" + ExitWithExitCode 1 + } + + # Find whether the build is already assigned to the channel or not + if ($buildInfo.channels) { + foreach ($channel in $buildInfo.channels) { + if ($channel.Id -eq $ChannelId) { + Write-Host "The build with BAR ID $BuildId is already on channel $ChannelId!" + ExitWithExitCode 0 + } + } + } + + Write-Host "Promoting build '$BuildId' to channel '$ChannelId'." + + Assign-BuildToChannel -BuildId $BuildId -ChannelId $ChannelId + + Write-Host 'done.' +} +catch { + Write-Host $_ + Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to promote build '$BuildId' to channel '$ChannelId'" + ExitWithExitCode 1 +} diff --git a/eng/common/post-build/check-channel-consistency.ps1 b/eng/common/post-build/check-channel-consistency.ps1 new file mode 100644 index 0000000..63f3464 --- /dev/null +++ b/eng/common/post-build/check-channel-consistency.ps1 @@ -0,0 +1,40 @@ +param( + [Parameter(Mandatory=$true)][string] $PromoteToChannels, # List of channels that the build should be promoted to + [Parameter(Mandatory=$true)][array] $AvailableChannelIds # List of channel IDs available in the YAML implementation +) + +try { + . $PSScriptRoot\post-build-utils.ps1 + + if ($PromoteToChannels -eq "") { + Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info." + ExitWithExitCode 0 + } + + # Check that every channel that Maestro told to promote the build to + # is available in YAML + $PromoteToChannelsIds = $PromoteToChannels -split "\D" | Where-Object { $_ } + + $hasErrors = $false + + foreach ($id in $PromoteToChannelsIds) { + if (($id -ne 0) -and ($id -notin $AvailableChannelIds)) { + Write-PipelineTaskError -Message "Channel $id is not present in the post-build YAML configuration! This is an error scenario. Please contact @dnceng." + $hasErrors = $true + } + } + + # The `Write-PipelineTaskError` doesn't error the script and we might report several errors + # in the previous lines. The check below makes sure that we return an error state from the + # script if we reported any validation error + if ($hasErrors) { + ExitWithExitCode 1 + } + + Write-Host 'done.' +} +catch { + Write-Host $_ + Write-PipelineTelemetryError -Category 'CheckChannelConsistency' -Message "There was an error while trying to check consistency of Maestro default channels for the build and post-build YAML configuration." 
+ ExitWithExitCode 1 +} diff --git a/eng/common/post-build/nuget-validation.ps1 b/eng/common/post-build/nuget-validation.ps1 new file mode 100644 index 0000000..dab3534 --- /dev/null +++ b/eng/common/post-build/nuget-validation.ps1 @@ -0,0 +1,24 @@ +# This script validates NuGet package metadata information using this +# tool: https://github.com/NuGet/NuGetGallery/tree/jver-verify/src/VerifyMicrosoftPackage + +param( + [Parameter(Mandatory=$true)][string] $PackagesPath, # Path to where the packages to be validated are + [Parameter(Mandatory=$true)][string] $ToolDestinationPath # Where the validation tool should be downloaded to +) + +try { + . $PSScriptRoot\post-build-utils.ps1 + + $url = 'https://raw.githubusercontent.com/NuGet/NuGetGallery/3e25ad135146676bcab0050a516939d9958bfa5d/src/VerifyMicrosoftPackage/verify.ps1' + + New-Item -ItemType 'directory' -Path ${ToolDestinationPath} -Force + + Invoke-WebRequest $url -OutFile ${ToolDestinationPath}\verify.ps1 + + & ${ToolDestinationPath}\verify.ps1 ${PackagesPath}\*.nupkg +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'NuGetValidation' -Message $_ + ExitWithExitCode 1 +} diff --git a/eng/common/post-build/post-build-utils.ps1 b/eng/common/post-build/post-build-utils.ps1 new file mode 100644 index 0000000..534f698 --- /dev/null +++ b/eng/common/post-build/post-build-utils.ps1 @@ -0,0 +1,91 @@ +# Most of the functions in this file require the variables `MaestroApiEndPoint`, +# `MaestroApiVersion` and `MaestroApiAccessToken` to be globally available. + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 + +# `tools.ps1` checks $ci to perform some actions. Since the post-build +# scripts don't necessarily execute in the same agent that run the +# build.ps1/sh script this variable isn't automatically set. +$ci = $true +$disableConfigureToolsetImport = $true +. 
$PSScriptRoot\..\tools.ps1 + +function Create-MaestroApiRequestHeaders([string]$ContentType = 'application/json') { + Validate-MaestroVars + + $headers = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]' + $headers.Add('Accept', $ContentType) + $headers.Add('Authorization',"Bearer $MaestroApiAccessToken") + return $headers +} + +function Get-MaestroChannel([int]$ChannelId) { + Validate-MaestroVars + + $apiHeaders = Create-MaestroApiRequestHeaders + $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}?api-version=$MaestroApiVersion" + + $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" } + return $result +} + +function Get-MaestroBuild([int]$BuildId) { + Validate-MaestroVars + + $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken + $apiEndpoint = "$MaestroApiEndPoint/api/builds/${BuildId}?api-version=$MaestroApiVersion" + + $result = try { return Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" } + return $result +} + +function Get-MaestroSubscriptions([string]$SourceRepository, [int]$ChannelId) { + Validate-MaestroVars + + $SourceRepository = [System.Web.HttpUtility]::UrlEncode($SourceRepository) + $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken + $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions?sourceRepository=$SourceRepository&channelId=$ChannelId&api-version=$MaestroApiVersion" + + $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" } + return $result +} + +function Assign-BuildToChannel([int]$BuildId, [int]$ChannelId) { + Validate-MaestroVars + + $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken + $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}/builds/${BuildId}?api-version=$MaestroApiVersion" + Invoke-WebRequest -Method Post -Uri $apiEndpoint -Headers $apiHeaders | Out-Null +} + +function Trigger-Subscription([string]$SubscriptionId) { + Validate-MaestroVars + + $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken + $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions/$SubscriptionId/trigger?api-version=$MaestroApiVersion" + Invoke-WebRequest -Uri $apiEndpoint -Headers $apiHeaders -Method Post | Out-Null +} + +function Validate-MaestroVars { + try { + Get-Variable MaestroApiEndPoint | Out-Null + Get-Variable MaestroApiVersion | Out-Null + Get-Variable MaestroApiAccessToken | Out-Null + + if (!($MaestroApiEndPoint -Match '^http[s]?://maestro-(int|prod).westus2.cloudapp.azure.com$')) { + Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiEndPoint is not a valid Maestro URL. '$MaestroApiEndPoint'" + ExitWithExitCode 1 + } + + if (!($MaestroApiVersion -Match '^[0-9]{4}-[0-9]{2}-[0-9]{2}$')) { + Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiVersion does not match a version string in the format yyyy-MM-DD. '$MaestroApiVersion'" + ExitWithExitCode 1 + } + } + catch { + Write-PipelineTelemetryError -Category 'MaestroVars' -Message 'Error: Variables `MaestroApiEndPoint`, `MaestroApiVersion` and `MaestroApiAccessToken` are required while using this script.' 
+ Write-Host $_ + ExitWithExitCode 1 + } +} diff --git a/eng/common/post-build/publish-using-darc.ps1 b/eng/common/post-build/publish-using-darc.ps1 new file mode 100644 index 0000000..8508397 --- /dev/null +++ b/eng/common/post-build/publish-using-darc.ps1 @@ -0,0 +1,54 @@ +param( + [Parameter(Mandatory=$true)][int] $BuildId, + [Parameter(Mandatory=$true)][int] $PublishingInfraVersion, + [Parameter(Mandatory=$true)][string] $AzdoToken, + [Parameter(Mandatory=$true)][string] $MaestroToken, + [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com', + [Parameter(Mandatory=$true)][string] $WaitPublishingFinish, + [Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters, + [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters +) + +try { + . $PSScriptRoot\post-build-utils.ps1 + + $darc = Get-Darc + + $optionalParams = [System.Collections.ArrayList]::new() + + if ("" -ne $ArtifactsPublishingAdditionalParameters) { + $optionalParams.Add("--artifact-publishing-parameters") | Out-Null + $optionalParams.Add($ArtifactsPublishingAdditionalParameters) | Out-Null + } + + if ("" -ne $SymbolPublishingAdditionalParameters) { + $optionalParams.Add("--symbol-publishing-parameters") | Out-Null + $optionalParams.Add($SymbolPublishingAdditionalParameters) | Out-Null + } + + if ("false" -eq $WaitPublishingFinish) { + $optionalParams.Add("--no-wait") | Out-Null + } + + & $darc add-build-to-channel ` + --id $buildId ` + --publishing-infra-version $PublishingInfraVersion ` + --default-channels ` + --source-branch main ` + --azdev-pat $AzdoToken ` + --bar-uri $MaestroApiEndPoint ` + --password $MaestroToken ` + @optionalParams + + if ($LastExitCode -ne 0) { + Write-Host "Problems using Darc to promote build ${buildId} to default channels. Stopping execution..." + exit 1 + } + + Write-Host 'done.' +} +catch { + Write-Host $_ + Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to publish build '$BuildId' to default channels." + ExitWithExitCode 1 +} diff --git a/eng/common/post-build/sourcelink-validation.ps1 b/eng/common/post-build/sourcelink-validation.ps1 new file mode 100644 index 0000000..4011d32 --- /dev/null +++ b/eng/common/post-build/sourcelink-validation.ps1 @@ -0,0 +1,319 @@ +param( + [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where Symbols.NuGet packages to be checked are stored + [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation + [Parameter(Mandatory=$false)][string] $GHRepoName, # GitHub name of the repo including the Org. E.g., dotnet/arcade + [Parameter(Mandatory=$false)][string] $GHCommit, # GitHub commit SHA used to build the packages + [Parameter(Mandatory=$true)][string] $SourcelinkCliVersion # Version of SourceLink CLI to use +) + +. $PSScriptRoot\post-build-utils.ps1 + +# Cache/HashMap (File -> Exist flag) used to consult whether a file exist +# in the repository at a specific commit point. This is populated by inserting +# all files present in the repo at a specific commit point. +$global:RepoFiles = @{} + +# Maximum number of jobs to run in parallel +$MaxParallelJobs = 16 + +$MaxRetries = 5 +$RetryWaitTimeInSeconds = 30 + +# Wait time between check for system load +$SecondsBetweenLoadChecks = 10 + +if (!$InputPath -or !(Test-Path $InputPath)){ + Write-Host "No files to validate." 
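+  # An empty or missing input directory is not an error for this validation step.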
+ ExitWithExitCode 0 +} + +$ValidatePackage = { + param( + [string] $PackagePath # Full path to a Symbols.NuGet package + ) + + . $using:PSScriptRoot\..\tools.ps1 + + # Ensure input file exist + if (!(Test-Path $PackagePath)) { + Write-Host "Input file does not exist: $PackagePath" + return [pscustomobject]@{ + result = 1 + packagePath = $PackagePath + } + } + + # Extensions for which we'll look for SourceLink information + # For now we'll only care about Portable & Embedded PDBs + $RelevantExtensions = @('.dll', '.exe', '.pdb') + + Write-Host -NoNewLine 'Validating ' ([System.IO.Path]::GetFileName($PackagePath)) '...' + + $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath) + $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId + $FailedFiles = 0 + + Add-Type -AssemblyName System.IO.Compression.FileSystem + + [System.IO.Directory]::CreateDirectory($ExtractPath) | Out-Null + + try { + $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath) + + $zip.Entries | + Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} | + ForEach-Object { + $FileName = $_.FullName + $Extension = [System.IO.Path]::GetExtension($_.Name) + $FakeName = -Join((New-Guid), $Extension) + $TargetFile = Join-Path -Path $ExtractPath -ChildPath $FakeName + + # We ignore resource DLLs + if ($FileName.EndsWith('.resources.dll')) { + return [pscustomobject]@{ + result = 0 + packagePath = $PackagePath + } + } + + [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true) + + $ValidateFile = { + param( + [string] $FullPath, # Full path to the module that has to be checked + [string] $RealPath, + [ref] $FailedFiles + ) + + $sourcelinkExe = "$env:USERPROFILE\.dotnet\tools" + $sourcelinkExe = Resolve-Path "$sourcelinkExe\sourcelink.exe" + $SourceLinkInfos = & $sourcelinkExe print-urls $FullPath | Out-String + + if ($LASTEXITCODE -eq 0 -and -not ([string]::IsNullOrEmpty($SourceLinkInfos))) { + $NumFailedLinks = 0 + + # We only care about Http addresses + $Matches = (Select-String '(http[s]?)(:\/\/)([^\s,]+)' -Input $SourceLinkInfos -AllMatches).Matches + + if ($Matches.Count -ne 0) { + $Matches.Value | + ForEach-Object { + $Link = $_ + $CommitUrl = "https://raw.githubusercontent.com/${using:GHRepoName}/${using:GHCommit}/" + + $FilePath = $Link.Replace($CommitUrl, "") + $Status = 200 + $Cache = $using:RepoFiles + + $attempts = 0 + + while ($attempts -lt $using:MaxRetries) { + if ( !($Cache.ContainsKey($FilePath)) ) { + try { + $Uri = $Link -as [System.URI] + + if ($Link -match "submodules") { + # Skip submodule links until sourcelink properly handles submodules + $Status = 200 + } + elseif ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) { + # Only GitHub links are valid + $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode + } + else { + # If it's not a github link, we want to break out of the loop and not retry. 
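+ # Forcing $attempts to MaxRetries ends the retry loop after this pass, so a
+ # non-GitHub link is reported as broken once instead of being retried.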
+ $Status = 0 + $attempts = $using:MaxRetries + } + } + catch { + Write-Host $_ + $Status = 0 + } + } + + if ($Status -ne 200) { + $attempts++ + + if ($attempts -lt $using:MaxRetries) + { + $attemptsLeft = $using:MaxRetries - $attempts + Write-Warning "Download failed, $attemptsLeft attempts remaining, will retry in $using:RetryWaitTimeInSeconds seconds" + Start-Sleep -Seconds $using:RetryWaitTimeInSeconds + } + else { + if ($NumFailedLinks -eq 0) { + if ($FailedFiles.Value -eq 0) { + Write-Host + } + + Write-Host "`tFile $RealPath has broken links:" + } + + Write-Host "`t`tFailed to retrieve $Link" + + $NumFailedLinks++ + } + } + else { + break + } + } + } + } + + if ($NumFailedLinks -ne 0) { + $FailedFiles.value++ + $global:LASTEXITCODE = 1 + } + } + } + + &$ValidateFile $TargetFile $FileName ([ref]$FailedFiles) + } + } + catch { + Write-Host $_ + } + finally { + $zip.Dispose() + } + + if ($FailedFiles -eq 0) { + Write-Host 'Passed.' + return [pscustomobject]@{ + result = 0 + packagePath = $PackagePath + } + } + else { + Write-PipelineTelemetryError -Category 'SourceLink' -Message "$PackagePath has broken SourceLink links." + return [pscustomobject]@{ + result = 1 + packagePath = $PackagePath + } + } +} + +function CheckJobResult( + $result, + $packagePath, + [ref]$ValidationFailures, + [switch]$logErrors) { + if ($result -ne '0') { + if ($logErrors) { + Write-PipelineTelemetryError -Category 'SourceLink' -Message "$packagePath has broken SourceLink links." + } + $ValidationFailures.Value++ + } +} + +function ValidateSourceLinkLinks { + if ($GHRepoName -ne '' -and !($GHRepoName -Match '^[^\s\/]+/[^\s\/]+$')) { + if (!($GHRepoName -Match '^[^\s-]+-[^\s]+$')) { + Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHRepoName should be in the format / or -. '$GHRepoName'" + ExitWithExitCode 1 + } + else { + $GHRepoName = $GHRepoName -replace '^([^\s-]+)-([^\s]+)$', '$1/$2'; + } + } + + if ($GHCommit -ne '' -and !($GHCommit -Match '^[0-9a-fA-F]{40}$')) { + Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHCommit should be a 40 chars hexadecimal string. '$GHCommit'" + ExitWithExitCode 1 + } + + if ($GHRepoName -ne '' -and $GHCommit -ne '') { + $RepoTreeURL = -Join('http://api.github.com/repos/', $GHRepoName, '/git/trees/', $GHCommit, '?recursive=1') + $CodeExtensions = @('.cs', '.vb', '.fs', '.fsi', '.fsx', '.fsscript') + + try { + # Retrieve the list of files in the repo at that particular commit point and store them in the RepoFiles hash + $Data = Invoke-WebRequest $RepoTreeURL -UseBasicParsing | ConvertFrom-Json | Select-Object -ExpandProperty tree + + foreach ($file in $Data) { + $Extension = [System.IO.Path]::GetExtension($file.path) + + if ($CodeExtensions.Contains($Extension)) { + $RepoFiles[$file.path] = 1 + } + } + } + catch { + Write-Host "Problems downloading the list of files from the repo. Url used: $RepoTreeURL . Execution will proceed without caching." + } + } + elseif ($GHRepoName -ne '' -or $GHCommit -ne '') { + Write-Host 'For using the http caching mechanism both GHRepoName and GHCommit should be informed.' 
+ } + + if (Test-Path $ExtractPath) { + Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue + } + + $ValidationFailures = 0 + + # Process each NuGet package in parallel + Get-ChildItem "$InputPath\*.symbols.nupkg" | + ForEach-Object { + Write-Host "Starting $($_.FullName)" + Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName | Out-Null + $NumJobs = @(Get-Job -State 'Running').Count + + while ($NumJobs -ge $MaxParallelJobs) { + Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again." + sleep $SecondsBetweenLoadChecks + $NumJobs = @(Get-Job -State 'Running').Count + } + + foreach ($Job in @(Get-Job -State 'Completed')) { + $jobResult = Wait-Job -Id $Job.Id | Receive-Job + CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures) -LogErrors + Remove-Job -Id $Job.Id + } + } + + foreach ($Job in @(Get-Job)) { + $jobResult = Wait-Job -Id $Job.Id | Receive-Job + CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures) + Remove-Job -Id $Job.Id + } + if ($ValidationFailures -gt 0) { + Write-PipelineTelemetryError -Category 'SourceLink' -Message "$ValidationFailures package(s) failed validation." + ExitWithExitCode 1 + } +} + +function InstallSourcelinkCli { + $sourcelinkCliPackageName = 'sourcelink' + + $dotnetRoot = InitializeDotNetCli -install:$true + $dotnet = "$dotnetRoot\dotnet.exe" + $toolList = & "$dotnet" tool list --global + + if (($toolList -like "*$sourcelinkCliPackageName*") -and ($toolList -like "*$sourcelinkCliVersion*")) { + Write-Host "SourceLink CLI version $sourcelinkCliVersion is already installed." + } + else { + Write-Host "Installing SourceLink CLI version $sourcelinkCliVersion..." + Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.' + & "$dotnet" tool install $sourcelinkCliPackageName --version $sourcelinkCliVersion --verbosity "minimal" --global + } +} + +try { + InstallSourcelinkCli + + foreach ($Job in @(Get-Job)) { + Remove-Job -Id $Job.Id + } + + ValidateSourceLinkLinks +} +catch { + Write-Host $_.Exception + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'SourceLink' -Message $_ + ExitWithExitCode 1 +} diff --git a/eng/common/post-build/symbols-validation.ps1 b/eng/common/post-build/symbols-validation.ps1 new file mode 100644 index 0000000..cd2181b --- /dev/null +++ b/eng/common/post-build/symbols-validation.ps1 @@ -0,0 +1,339 @@ +param( + [Parameter(Mandatory = $true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored + [Parameter(Mandatory = $true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation + [Parameter(Mandatory = $true)][string] $DotnetSymbolVersion, # Version of dotnet symbol to use + [Parameter(Mandatory = $false)][switch] $CheckForWindowsPdbs, # If we should check for the existence of windows pdbs in addition to portable PDBs + [Parameter(Mandatory = $false)][switch] $ContinueOnError, # If we should keep checking symbols after an error + [Parameter(Mandatory = $false)][switch] $Clean, # Clean extracted symbols directory after checking symbols + [Parameter(Mandatory = $false)][string] $SymbolExclusionFile # Exclude the symbols in the file from publishing to symbol server +) + +. 
$PSScriptRoot\..\tools.ps1
+# Maximum number of jobs to run in parallel
+$MaxParallelJobs = 16
+
+# Max number of retries
+$MaxRetry = 5
+
+# Wait time between checks for system load
+$SecondsBetweenLoadChecks = 10
+
+# Set error codes
+Set-Variable -Name "ERROR_BADEXTRACT" -Option Constant -Value -1
+Set-Variable -Name "ERROR_FILEDOESNOTEXIST" -Option Constant -Value -2
+
+$WindowsPdbVerificationParam = ""
+if ($CheckForWindowsPdbs) {
+  $WindowsPdbVerificationParam = "--windows-pdbs"
+}
+
+$ExclusionSet = New-Object System.Collections.Generic.HashSet[string];
+
+if (!$InputPath -or !(Test-Path $InputPath)){
+  Write-Host "No symbols to validate."
+  ExitWithExitCode 0
+}
+
+# Check if the exclusion file exists
+if ($SymbolExclusionFile -and (Test-Path $SymbolExclusionFile)){
+  [string[]]$Exclusions = Get-Content "$SymbolExclusionFile"
+  $Exclusions | foreach { if($_ -and $_.Trim()){$ExclusionSet.Add($_)} }
+}
+else {
+  Write-Host "Symbol exclusion file does not exist. No symbols to exclude."
+}
+
+$CountMissingSymbols = {
+  param(
+    [string] $PackagePath,                 # Path to a NuGet package
+    [string] $WindowsPdbVerificationParam  # If we should check for the existence of Windows PDBs in addition to portable PDBs
+  )
+
+  Add-Type -AssemblyName System.IO.Compression.FileSystem
+
+  Write-Host "Validating $PackagePath "
+
+  # Ensure the input file exists
+  if (!(Test-Path $PackagePath)) {
+    Write-PipelineTaskError "Input file does not exist: $PackagePath"
+    return [pscustomobject]@{
+      result = $using:ERROR_FILEDOESNOTEXIST
+      packagePath = $PackagePath
+    }
+  }
+
+  # Extensions for which we'll look for symbols
+  $RelevantExtensions = @('.dll', '.exe', '.so', '.dylib')
+
+  # How many files are missing symbol information
+  $MissingSymbols = 0
+
+  $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
+  $PackageGuid = New-Guid
+  $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageGuid
+  $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath 'Symbols'
+
+  try {
+    [System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath)
+  }
+  catch {
+    Write-Host "Something went wrong extracting $PackagePath"
+    Write-Host $_
+    return [pscustomobject]@{
+      result = $using:ERROR_BADEXTRACT
+      packagePath = $PackagePath
+    }
+  }
+
+  Get-ChildItem -Recurse $ExtractPath |
+    Where-Object { $RelevantExtensions -contains $_.Extension } |
+    ForEach-Object {
+      $FileName = $_.FullName
+      if ($FileName -Match '\\ref\\') {
+        Write-Host "`t Ignoring reference assembly file " $FileName
+        return
+      }
+
+      $FirstMatchingSymbolDescriptionOrDefault = {
+        param(
+          [string] $FullPath,                    # Full path to the module that has to be checked
+          [string] $TargetServerParam,           # Parameter to pass to `Symbol Tool` indicating the server to look up for symbols
+          [string] $WindowsPdbVerificationParam, # Parameter to pass to the optional check for Windows PDBs.
+          [string] $SymbolsPath
+        )
+
+        $FileName = [System.IO.Path]::GetFileName($FullPath)
+        $Extension = [System.IO.Path]::GetExtension($FullPath)
+
+        # Those below are potential symbol files that `dotnet symbol` might
+        # return. Which one is returned depends on the type of file we are
+        # checking and which type of file was uploaded.
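+        #
+        # For example (illustrative): for an input module 'System.Foo.dll', the candidate
+        # paths computed below would be 'System.Foo.dll' (the module itself), 'System.Foo.pdb',
+        # 'System.Foo.ni.pdb', 'System.Foo.so.dbg' and 'System.Foo.dylib.dwarf'; which of them
+        # actually exist depends on the platform the binary was built for.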
+ + # The file itself is returned + $SymbolPath = $SymbolsPath + '\' + $FileName + + # PDB file for the module + $PdbPath = $SymbolPath.Replace($Extension, '.pdb') + + # PDB file for R2R module (created by crossgen) + $NGenPdb = $SymbolPath.Replace($Extension, '.ni.pdb') + + # DBG file for a .so library + $SODbg = $SymbolPath.Replace($Extension, '.so.dbg') + + # DWARF file for a .dylib + $DylibDwarf = $SymbolPath.Replace($Extension, '.dylib.dwarf') + + $dotnetSymbolExe = "$env:USERPROFILE\.dotnet\tools" + $dotnetSymbolExe = Resolve-Path "$dotnetSymbolExe\dotnet-symbol.exe" + + $totalRetries = 0 + + while ($totalRetries -lt $using:MaxRetry) { + + # Save the output and get diagnostic output + $output = & $dotnetSymbolExe --symbols --modules $WindowsPdbVerificationParam $TargetServerParam $FullPath -o $SymbolsPath --diagnostics | Out-String + + if ((Test-Path $PdbPath) -and (Test-path $SymbolPath)) { + return 'Module and PDB for Module' + } + elseif ((Test-Path $NGenPdb) -and (Test-Path $PdbPath) -and (Test-Path $SymbolPath)) { + return 'Dll, PDB and NGen PDB' + } + elseif ((Test-Path $SODbg) -and (Test-Path $SymbolPath)) { + return 'So and DBG for SO' + } + elseif ((Test-Path $DylibDwarf) -and (Test-Path $SymbolPath)) { + return 'Dylib and Dwarf for Dylib' + } + elseif (Test-Path $SymbolPath) { + return 'Module' + } + else + { + $totalRetries++ + } + } + + return $null + } + + $FileRelativePath = $FileName.Replace("$ExtractPath\", "") + if (($($using:ExclusionSet) -ne $null) -and ($($using:ExclusionSet).Contains($FileRelativePath) -or ($($using:ExclusionSet).Contains($FileRelativePath.Replace("\", "/"))))){ + Write-Host "Skipping $FileName from symbol validation" + } + + else { + $FileGuid = New-Guid + $ExpandedSymbolsPath = Join-Path -Path $SymbolsPath -ChildPath $FileGuid + + $SymbolsOnMSDL = & $FirstMatchingSymbolDescriptionOrDefault ` + -FullPath $FileName ` + -TargetServerParam '--microsoft-symbol-server' ` + -SymbolsPath "$ExpandedSymbolsPath-msdl" ` + -WindowsPdbVerificationParam $WindowsPdbVerificationParam + $SymbolsOnSymWeb = & $FirstMatchingSymbolDescriptionOrDefault ` + -FullPath $FileName ` + -TargetServerParam '--internal-server' ` + -SymbolsPath "$ExpandedSymbolsPath-symweb" ` + -WindowsPdbVerificationParam $WindowsPdbVerificationParam + + Write-Host -NoNewLine "`t Checking file " $FileName "... " + + if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) { + Write-Host "Symbols found on MSDL ($SymbolsOnMSDL) and SymWeb ($SymbolsOnSymWeb)" + } + else { + $MissingSymbols++ + + if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) { + Write-Host 'No symbols found on MSDL or SymWeb!' + } + else { + if ($SymbolsOnMSDL -eq $null) { + Write-Host 'No symbols found on MSDL!' + } + else { + Write-Host 'No symbols found on SymWeb!' 
+ } + } + } + } + } + + if ($using:Clean) { + Remove-Item $ExtractPath -Recurse -Force + } + + Pop-Location + + return [pscustomobject]@{ + result = $MissingSymbols + packagePath = $PackagePath + } +} + +function CheckJobResult( + $result, + $packagePath, + [ref]$DupedSymbols, + [ref]$TotalFailures) { + if ($result -eq $ERROR_BADEXTRACT) { + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath has duplicated symbol files" + $DupedSymbols.Value++ + } + elseif ($result -eq $ERROR_FILEDOESNOTEXIST) { + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath does not exist" + $TotalFailures.Value++ + } + elseif ($result -gt '0') { + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Missing symbols for $result modules in the package $packagePath" + $TotalFailures.Value++ + } + else { + Write-Host "All symbols verified for package $packagePath" + } +} + +function CheckSymbolsAvailable { + if (Test-Path $ExtractPath) { + Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue + } + + $TotalPackages = 0 + $TotalFailures = 0 + $DupedSymbols = 0 + + Get-ChildItem "$InputPath\*.nupkg" | + ForEach-Object { + $FileName = $_.Name + $FullName = $_.FullName + + # These packages from Arcade-Services include some native libraries that + # our current symbol uploader can't handle. Below is a workaround until + # we get issue: https://github.com/dotnet/arcade/issues/2457 sorted. + if ($FileName -Match 'Microsoft\.DotNet\.Darc\.') { + Write-Host "Ignoring Arcade-services file: $FileName" + Write-Host + return + } + elseif ($FileName -Match 'Microsoft\.DotNet\.Maestro\.Tasks\.') { + Write-Host "Ignoring Arcade-services file: $FileName" + Write-Host + return + } + + $TotalPackages++ + + Start-Job -ScriptBlock $CountMissingSymbols -ArgumentList @($FullName,$WindowsPdbVerificationParam) | Out-Null + + $NumJobs = @(Get-Job -State 'Running').Count + + while ($NumJobs -ge $MaxParallelJobs) { + Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again." + sleep $SecondsBetweenLoadChecks + $NumJobs = @(Get-Job -State 'Running').Count + } + + foreach ($Job in @(Get-Job -State 'Completed')) { + $jobResult = Wait-Job -Id $Job.Id | Receive-Job + CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$DupedSymbols) ([ref]$TotalFailures) + Remove-Job -Id $Job.Id + } + Write-Host + } + + foreach ($Job in @(Get-Job)) { + $jobResult = Wait-Job -Id $Job.Id | Receive-Job + CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$DupedSymbols) ([ref]$TotalFailures) + } + + if ($TotalFailures -gt 0 -or $DupedSymbols -gt 0) { + if ($TotalFailures -gt 0) { + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Symbols missing for $TotalFailures/$TotalPackages packages" + } + + if ($DupedSymbols -gt 0) { + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$DupedSymbols/$TotalPackages packages had duplicated symbol files and could not be extracted" + } + + ExitWithExitCode 1 + } + else { + Write-Host "All symbols validated!" + } +} + +function InstallDotnetSymbol { + $dotnetSymbolPackageName = 'dotnet-symbol' + + $dotnetRoot = InitializeDotNetCli -install:$true + $dotnet = "$dotnetRoot\dotnet.exe" + $toolList = & "$dotnet" tool list --global + + if (($toolList -like "*$dotnetSymbolPackageName*") -and ($toolList -like "*$dotnetSymbolVersion*")) { + Write-Host "dotnet-symbol version $dotnetSymbolVersion is already installed." 
+ } + else { + Write-Host "Installing dotnet-symbol version $dotnetSymbolVersion..." + Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.' + & "$dotnet" tool install $dotnetSymbolPackageName --version $dotnetSymbolVersion --verbosity "minimal" --global + } +} + +try { + . $PSScriptRoot\post-build-utils.ps1 + + InstallDotnetSymbol + + foreach ($Job in @(Get-Job)) { + Remove-Job -Id $Job.Id + } + + CheckSymbolsAvailable +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message $_ + ExitWithExitCode 1 +} diff --git a/eng/common/post-build/trigger-subscriptions.ps1 b/eng/common/post-build/trigger-subscriptions.ps1 new file mode 100644 index 0000000..55dea51 --- /dev/null +++ b/eng/common/post-build/trigger-subscriptions.ps1 @@ -0,0 +1,64 @@ +param( + [Parameter(Mandatory=$true)][string] $SourceRepo, + [Parameter(Mandatory=$true)][int] $ChannelId, + [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken, + [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com', + [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16' +) + +try { + . $PSScriptRoot\post-build-utils.ps1 + + # Get all the $SourceRepo subscriptions + $normalizedSourceRepo = $SourceRepo.Replace('dnceng@', '') + $subscriptions = Get-MaestroSubscriptions -SourceRepository $normalizedSourceRepo -ChannelId $ChannelId + + if (!$subscriptions) { + Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message "No subscriptions found for source repo '$normalizedSourceRepo' in channel '$ChannelId'" + ExitWithExitCode 0 + } + + $subscriptionsToTrigger = New-Object System.Collections.Generic.List[string] + $failedTriggeredSubscription = $false + + # Get all enabled subscriptions that need dependency flow on 'everyBuild' + foreach ($subscription in $subscriptions) { + if ($subscription.enabled -and $subscription.policy.updateFrequency -like 'everyBuild' -and $subscription.channel.id -eq $ChannelId) { + Write-Host "Should trigger this subscription: ${$subscription.id}" + [void]$subscriptionsToTrigger.Add($subscription.id) + } + } + + foreach ($subscriptionToTrigger in $subscriptionsToTrigger) { + try { + Write-Host "Triggering subscription '$subscriptionToTrigger'." + + Trigger-Subscription -SubscriptionId $subscriptionToTrigger + + Write-Host 'done.' + } + catch + { + Write-Host "There was an error while triggering subscription '$subscriptionToTrigger'" + Write-Host $_ + Write-Host $_.ScriptStackTrace + $failedTriggeredSubscription = $true + } + } + + if ($subscriptionsToTrigger.Count -eq 0) { + Write-Host "No subscription matched source repo '$normalizedSourceRepo' and channel ID '$ChannelId'." + } + elseif ($failedTriggeredSubscription) { + Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message 'At least one subscription failed to be triggered...' + ExitWithExitCode 1 + } + else { + Write-Host 'All subscriptions were triggered successfully!' 
+  }
+}
+catch {
+  Write-Host $_.ScriptStackTrace
+  Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message $_
+  ExitWithExitCode 1
+}
diff --git a/eng/common/retain-build.ps1 b/eng/common/retain-build.ps1
new file mode 100644
index 0000000..e7ba975
--- /dev/null
+++ b/eng/common/retain-build.ps1
@@ -0,0 +1,45 @@
+
+Param(
+[Parameter(Mandatory=$true)][int] $buildId,
+[Parameter(Mandatory=$true)][string] $azdoOrgUri,
+[Parameter(Mandatory=$true)][string] $azdoProject,
+[Parameter(Mandatory=$true)][string] $token
+)
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+
+function Get-AzDOHeaders(
+  [string] $token)
+{
+  $base64AuthInfo = [Convert]::ToBase64String([Text.Encoding]::ASCII.GetBytes(":${token}"))
+  $headers = @{"Authorization"="Basic $base64AuthInfo"}
+  return $headers
+}
+
+function Update-BuildRetention(
+  [string] $azdoOrgUri,
+  [string] $azdoProject,
+  [int] $buildId,
+  [string] $token)
+{
+  $headers = Get-AzDOHeaders -token $token
+  $requestBody = "{
+    `"keepForever`": `"true`"
+  }"
+
+  $requestUri = "${azdoOrgUri}/${azdoProject}/_apis/build/builds/${buildId}?api-version=6.0"
+  Write-Host "Attempting to retain build using the following URI: ${requestUri} ..."
+
+  try {
+    Invoke-RestMethod -Uri $requestUri -Method Patch -Body $requestBody -Headers $headers -ContentType "application/json"
+    Write-Host "Updated retention settings for build ${buildId}."
+  }
+  catch {
+    Write-Error "Failed to update retention settings for build: $($_.Exception.Response.StatusDescription)"
+    exit 1
+  }
+}
+
+Update-BuildRetention -azdoOrgUri $azdoOrgUri -azdoProject $azdoProject -buildId $buildId -token $token
+exit 0
diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1
new file mode 100644
index 0000000..73828dd
--- /dev/null
+++ b/eng/common/sdk-task.ps1
@@ -0,0 +1,97 @@
+[CmdletBinding(PositionalBinding=$false)]
+Param(
+  [string] $configuration = 'Debug',
+  [string] $task,
+  [string] $verbosity = 'minimal',
+  [string] $msbuildEngine = $null,
+  [switch] $restore,
+  [switch] $prepareMachine,
+  [switch] $help,
+  [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
+)
+
+$ci = $true
+$binaryLog = $true
+$warnAsError = $true
+
+. $PSScriptRoot\tools.ps1
+
+function Print-Usage() {
+  Write-Host "Common settings:"
+  Write-Host "  -task <value>           Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)"
+  Write-Host "  -restore                Restore dependencies"
+  Write-Host "  -verbosity <value>      Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]"
+  Write-Host "  -help                   Print help and exit"
+  Write-Host ""
+
+  Write-Host "Advanced settings:"
+  Write-Host "  -prepareMachine         Prepare machine for CI run"
+  Write-Host "  -msbuildEngine <value>  Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
+  Write-Host ""
+  Write-Host "Command line arguments not listed above are passed through to msbuild."
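+  Write-Host ""
+  Write-Host "Example (illustrative; replace <value> with an SdkTasks project name):"
+  Write-Host "  .\eng\common\sdk-task.ps1 -restore -task <value> -configuration Release"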
+} + +function Build([string]$target) { + $logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" } + $log = Join-Path $LogDir "$task$logSuffix.binlog" + $outputPath = Join-Path $ToolsetDir "$task\" + + MSBuild $taskProject ` + /bl:$log ` + /t:$target ` + /p:Configuration=$configuration ` + /p:RepoRoot=$RepoRoot ` + /p:BaseIntermediateOutputPath=$outputPath ` + /v:$verbosity ` + @properties +} + +try { + if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) { + Print-Usage + exit 0 + } + + if ($task -eq "") { + Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task '" + Print-Usage + ExitWithExitCode 1 + } + + if( $msbuildEngine -eq "vs") { + # Ensure desktop MSBuild is available for sdk tasks. + if( -not ($GlobalJson.tools.PSObject.Properties.Name -contains "vs" )) { + $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty + } + if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) { + $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.8.1-2" -MemberType NoteProperty + } + if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") { + $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true + } + if ($xcopyMSBuildToolsFolder -eq $null) { + throw 'Unable to get xcopy downloadable version of msbuild' + } + + $global:_MSBuildExe = "$($xcopyMSBuildToolsFolder)\MSBuild\Current\Bin\MSBuild.exe" + } + + $taskProject = GetSdkTaskProject $task + if (!(Test-Path $taskProject)) { + Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task" + ExitWithExitCode 1 + } + + if ($restore) { + Build 'Restore' + } + + Build 'Execute' +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'Build' -Message $_ + ExitWithExitCode 1 +} + +ExitWithExitCode 0 diff --git a/eng/common/sdl/NuGet.config b/eng/common/sdl/NuGet.config new file mode 100644 index 0000000..3849bdb --- /dev/null +++ b/eng/common/sdl/NuGet.config @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/eng/common/sdl/configure-sdl-tool.ps1 b/eng/common/sdl/configure-sdl-tool.ps1 new file mode 100644 index 0000000..27f5a41 --- /dev/null +++ b/eng/common/sdl/configure-sdl-tool.ps1 @@ -0,0 +1,130 @@ +Param( + [string] $GuardianCliLocation, + [string] $WorkingDirectory, + [string] $TargetDirectory, + [string] $GdnFolder, + # The list of Guardian tools to configure. For each object in the array: + # - If the item is a [hashtable], it must contain these entries: + # - Name = The tool name as Guardian knows it. + # - Scenario = (Optional) Scenario-specific name for this configuration entry. It must be unique + # among all tool entries with the same Name. + # - Args = (Optional) Array of Guardian tool configuration args, like '@("Target > C:\temp")' + # - If the item is a [string] $v, it is treated as '@{ Name="$v" }' + [object[]] $ToolsList, + [string] $GuardianLoggerLevel='Standard', + # Optional: Additional params to add to any tool using CredScan. + [string[]] $CrScanAdditionalRunConfigParams, + # Optional: Additional params to add to any tool using PoliCheck. + [string[]] $PoliCheckAdditionalRunConfigParams, + # Optional: Additional params to add to any tool using CodeQL/Semmle. + [string[]] $CodeQLAdditionalRunConfigParams, + # Optional: Additional params to add to any tool using Binskim. 
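+  # (All *AdditionalRunConfigParams entries use Guardian's "{Arg id} < {Value}" form;
+  #  an illustrative value would be @("SourceCodeDirectory < D:\a\1\s").)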
+ [string[]] $BinskimAdditionalRunConfigParams +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 +$disableConfigureToolsetImport = $true +$global:LASTEXITCODE = 0 + +try { + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 + + # Normalize tools list: all in [hashtable] form with defined values for each key. + $ToolsList = $ToolsList | + ForEach-Object { + if ($_ -is [string]) { + $_ = @{ Name = $_ } + } + + if (-not ($_['Scenario'])) { $_.Scenario = "" } + if (-not ($_['Args'])) { $_.Args = @() } + $_ + } + + Write-Host "List of tools to configure:" + $ToolsList | ForEach-Object { $_ | Out-String | Write-Host } + + # We store config files in the r directory of .gdn + $gdnConfigPath = Join-Path $GdnFolder 'r' + $ValidPath = Test-Path $GuardianCliLocation + + if ($ValidPath -eq $False) + { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location." + ExitWithExitCode 1 + } + + foreach ($tool in $ToolsList) { + # Put together the name and scenario to make a unique key. + $toolConfigName = $tool.Name + if ($tool.Scenario) { + $toolConfigName += "_" + $tool.Scenario + } + + Write-Host "=== Configuring $toolConfigName..." + + $gdnConfigFile = Join-Path $gdnConfigPath "$toolConfigName-configure.gdnconfig" + + # For some tools, add default and automatic args. + switch -Exact ($tool.Name) { + 'credscan' { + if ($targetDirectory) { + $tool.Args += "`"TargetDirectory < $TargetDirectory`"" + } + $tool.Args += "`"OutputType < pre`"" + $tool.Args += $CrScanAdditionalRunConfigParams + } + 'policheck' { + if ($targetDirectory) { + $tool.Args += "`"Target < $TargetDirectory`"" + } + $tool.Args += $PoliCheckAdditionalRunConfigParams + } + {$_ -in 'semmle', 'codeql'} { + if ($targetDirectory) { + $tool.Args += "`"SourceCodeDirectory < $TargetDirectory`"" + } + $tool.Args += $CodeQLAdditionalRunConfigParams + } + 'binskim' { + if ($targetDirectory) { + # Binskim crashes due to specific PDBs. GitHub issue: https://github.com/microsoft/binskim/issues/924. + # We are excluding all `_.pdb` files from the scan. + $tool.Args += "`"Target < $TargetDirectory\**;-:file|$TargetDirectory\**\_.pdb`"" + } + $tool.Args += $BinskimAdditionalRunConfigParams + } + } + + # Create variable pointing to the args array directly so we can use splat syntax later. + $toolArgs = $tool.Args + + # Configure the tool. If args array is provided or the current tool has some default arguments + # defined, add "--args" and splat each element on the end. Arg format is "{Arg id} < {Value}", + # one per parameter. 
Doc page for "guardian configure": + # https://dev.azure.com/securitytools/SecurityIntegration/_wiki/wikis/Guardian/1395/configure + Exec-BlockVerbosely { + & $GuardianCliLocation configure ` + --working-directory $WorkingDirectory ` + --tool $tool.Name ` + --output-path $gdnConfigFile ` + --logger-level $GuardianLoggerLevel ` + --noninteractive ` + --force ` + $(if ($toolArgs) { "--args" }) @toolArgs + Exit-IfNZEC "Sdl" + } + + Write-Host "Created '$toolConfigName' configuration file: $gdnConfigFile" + } +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} diff --git a/eng/common/sdl/execute-all-sdl-tools.ps1 b/eng/common/sdl/execute-all-sdl-tools.ps1 new file mode 100644 index 0000000..4715d75 --- /dev/null +++ b/eng/common/sdl/execute-all-sdl-tools.ps1 @@ -0,0 +1,167 @@ +Param( + [string] $GuardianPackageName, # Required: the name of guardian CLI package (not needed if GuardianCliLocation is specified) + [string] $NugetPackageDirectory, # Required: directory where NuGet packages are installed (not needed if GuardianCliLocation is specified) + [string] $GuardianCliLocation, # Optional: Direct location of Guardian CLI executable if GuardianPackageName & NugetPackageDirectory are not specified + [string] $Repository=$env:BUILD_REPOSITORY_NAME, # Required: the name of the repository (e.g. dotnet/arcade) + [string] $BranchName=$env:BUILD_SOURCEBRANCH, # Optional: name of branch or version of gdn settings; defaults to master + [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located + [string] $ArtifactsDirectory = (Join-Path $env:BUILD_ARTIFACTSTAGINGDIRECTORY ('artifacts')), # Required: the directory where build artifacts are located + [string] $AzureDevOpsAccessToken, # Required: access token for dnceng; should be provided via KeyVault + + # Optional: list of SDL tools to run on source code. See 'configure-sdl-tool.ps1' for tools list + # format. + [object[]] $SourceToolsList, + # Optional: list of SDL tools to run on built artifacts. See 'configure-sdl-tool.ps1' for tools + # list format. + [object[]] $ArtifactToolsList, + # Optional: list of SDL tools to run without automatically specifying a target directory. See + # 'configure-sdl-tool.ps1' for tools list format. + [object[]] $CustomToolsList, + + [bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs. + [string] $TsaBranchName=$env:BUILD_SOURCEBRANCH, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs. + [string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs. + [string] $BuildNumber=$env:BUILD_BUILDNUMBER, # Optional: required for TSA publish; defaults to $(Build.BuildNumber) + [bool] $UpdateBaseline=$False, # Optional: if true, will update the baseline in the repository; should only be run after fixing any issues which need to be fixed + [bool] $TsaOnboard=$False, # Optional: if true, will onboard the repository to TSA; should only be run once; TSA is the automated framework used to upload test results as bugs. 
+ [string] $TsaInstanceUrl, # Optional: only needed if TsaOnboard or TsaPublish is true; the instance-url registered with TSA; TSA is the automated framework used to upload test results as bugs. + [string] $TsaCodebaseName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the codebase registered with TSA; TSA is the automated framework used to upload test results as bugs. + [string] $TsaProjectName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the project registered with TSA; TSA is the automated framework used to upload test results as bugs. + [string] $TsaNotificationEmail, # Optional: only needed if TsaOnboard is true; the email(s) which will receive notifications of TSA bug filings (e.g. alias@microsoft.com); TSA is the automated framework used to upload test results as bugs. + [string] $TsaCodebaseAdmin, # Optional: only needed if TsaOnboard is true; the aliases which are admins of the TSA codebase (e.g. DOMAIN\alias); TSA is the automated framework used to upload test results as bugs. + [string] $TsaBugAreaPath, # Optional: only needed if TsaOnboard is true; the area path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs. + [string] $TsaIterationPath, # Optional: only needed if TsaOnboard is true; the iteration path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs. + [string] $GuardianLoggerLevel='Standard', # Optional: the logger level for the Guardian CLI; options are Trace, Verbose, Standard, Warning, and Error + [string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1") + [string[]] $PoliCheckAdditionalRunConfigParams, # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1") + [string[]] $CodeQLAdditionalRunConfigParams, # Optional: Additional Params to custom build a Semmle/CodeQL run config in the format @("xyz < abc","sdf < 1") + [string[]] $BinskimAdditionalRunConfigParams, # Optional: Additional Params to custom build a Binskim run config in the format @("xyz < abc","sdf < 1") + [bool] $BreakOnFailure=$False # Optional: Fail the build if there were errors during the run +) + +try { + $ErrorActionPreference = 'Stop' + Set-StrictMode -Version 2.0 + $disableConfigureToolsetImport = $true + $global:LASTEXITCODE = 0 + + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 + + #Replace repo names to the format of org/repo + if (!($Repository.contains('/'))) { + $RepoName = $Repository -replace '(.*?)-(.*)', '$1/$2'; + } + else{ + $RepoName = $Repository; + } + + if ($GuardianPackageName) { + $guardianCliLocation = Join-Path $NugetPackageDirectory (Join-Path $GuardianPackageName (Join-Path 'tools' 'guardian.cmd')) + } else { + $guardianCliLocation = $GuardianCliLocation + } + + $workingDirectory = (Split-Path $SourceDirectory -Parent) + $ValidPath = Test-Path $guardianCliLocation + + if ($ValidPath -eq $False) + { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Invalid Guardian CLI Location.' 
+ ExitWithExitCode 1 + } + + Exec-BlockVerbosely { + & $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel + } + $gdnFolder = Join-Path $workingDirectory '.gdn' + + if ($TsaOnboard) { + if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) { + Exec-BlockVerbosely { + & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel + } + if ($LASTEXITCODE -ne 0) { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-onboard failed with exit code $LASTEXITCODE." + ExitWithExitCode $LASTEXITCODE + } + } else { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not onboard to TSA -- not all required values ($TsaCodebaseName, $TsaNotificationEmail, $TsaCodebaseAdmin, $TsaBugAreaPath) were specified.' + ExitWithExitCode 1 + } + } + + # Configure a list of tools with a default target directory. Populates the ".gdn/r" directory. + function Configure-ToolsList([object[]] $tools, [string] $targetDirectory) { + if ($tools -and $tools.Count -gt 0) { + Exec-BlockVerbosely { + & $(Join-Path $PSScriptRoot 'configure-sdl-tool.ps1') ` + -GuardianCliLocation $guardianCliLocation ` + -WorkingDirectory $workingDirectory ` + -TargetDirectory $targetDirectory ` + -GdnFolder $gdnFolder ` + -ToolsList $tools ` + -AzureDevOpsAccessToken $AzureDevOpsAccessToken ` + -GuardianLoggerLevel $GuardianLoggerLevel ` + -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams ` + -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams ` + -CodeQLAdditionalRunConfigParams $CodeQLAdditionalRunConfigParams ` + -BinskimAdditionalRunConfigParams $BinskimAdditionalRunConfigParams + if ($BreakOnFailure) { + Exit-IfNZEC "Sdl" + } + } + } + } + + # Configure Artifact and Source tools with default Target directories. + Configure-ToolsList $ArtifactToolsList $ArtifactsDirectory + Configure-ToolsList $SourceToolsList $SourceDirectory + # Configure custom tools with no default Target directory. + Configure-ToolsList $CustomToolsList $null + + # At this point, all tools are configured in the ".gdn" directory. Run them all in a single call. + # (If we used "run" multiple times, each run would overwrite data from earlier runs.) 
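+  # Roughly equivalent manual invocation (illustrative; placeholders in angle brackets):
+  #   guardian run --working-directory <sources> --baseline mainbaseline --update-baseline false --config <.gdn\r\*.gdnconfig>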
+ Exec-BlockVerbosely { + & $(Join-Path $PSScriptRoot 'run-sdl.ps1') ` + -GuardianCliLocation $guardianCliLocation ` + -WorkingDirectory $SourceDirectory ` + -UpdateBaseline $UpdateBaseline ` + -GdnFolder $gdnFolder + } + + if ($TsaPublish) { + if ($TsaBranchName -and $BuildNumber) { + if (-not $TsaRepositoryName) { + $TsaRepositoryName = "$($Repository)-$($BranchName)" + } + Exec-BlockVerbosely { + & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel + } + if ($LASTEXITCODE -ne 0) { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-publish failed with exit code $LASTEXITCODE." + ExitWithExitCode $LASTEXITCODE + } + } else { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not publish to TSA -- not all required values ($TsaBranchName, $BuildNumber) were specified.' + ExitWithExitCode 1 + } + } + + if ($BreakOnFailure) { + Write-Host "Failing the build in case of breaking results..." + Exec-BlockVerbosely { + & $guardianCliLocation break --working-directory $workingDirectory --logger-level $GuardianLoggerLevel + } + } else { + Write-Host "Letting the build pass even if there were breaking results..." + } +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + exit 1 +} diff --git a/eng/common/sdl/extract-artifact-archives.ps1 b/eng/common/sdl/extract-artifact-archives.ps1 new file mode 100644 index 0000000..68da4fb --- /dev/null +++ b/eng/common/sdl/extract-artifact-archives.ps1 @@ -0,0 +1,63 @@ +# This script looks for each archive file in a directory and extracts it into the target directory. +# For example, the file "$InputPath/bin.tar.gz" extracts to "$ExtractPath/bin.tar.gz.extracted/**". +# Uses the "tar" utility added to Windows 10 / Windows 2019 that supports tar.gz and zip. +param( + # Full path to directory where archives are stored. + [Parameter(Mandatory=$true)][string] $InputPath, + # Full path to directory to extract archives into. May be the same as $InputPath. + [Parameter(Mandatory=$true)][string] $ExtractPath +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 + +$disableConfigureToolsetImport = $true + +try { + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 + + Measure-Command { + $jobs = @() + + # Find archive files for non-Windows and Windows builds. + $archiveFiles = @( + Get-ChildItem (Join-Path $InputPath "*.tar.gz") + Get-ChildItem (Join-Path $InputPath "*.zip") + ) + + foreach ($targzFile in $archiveFiles) { + $jobs += Start-Job -ScriptBlock { + $file = $using:targzFile + $fileName = [System.IO.Path]::GetFileName($file) + $extractDir = Join-Path $using:ExtractPath "$fileName.extracted" + + New-Item $extractDir -ItemType Directory -Force | Out-Null + + Write-Host "Extracting '$file' to '$extractDir'..." + + # Pipe errors to stdout to prevent PowerShell detecting them and quitting the job early. 
+ # This type of quit skips the catch, so we wouldn't be able to tell which file triggered the + # error. Save output so it can be stored in the exception string along with context. + $output = tar -xf $file -C $extractDir 2>&1 + # Handle NZEC manually rather than using Exit-IfNZEC: we are in a background job, so we + # don't have access to the outer scope. + if ($LASTEXITCODE -ne 0) { + throw "Error extracting '$file': non-zero exit code ($LASTEXITCODE). Output: '$output'" + } + + Write-Host "Extracted to $extractDir" + } + } + + Receive-Job $jobs -Wait + } +} +catch { + Write-Host $_ + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} diff --git a/eng/common/sdl/extract-artifact-packages.ps1 b/eng/common/sdl/extract-artifact-packages.ps1 new file mode 100644 index 0000000..f031ed5 --- /dev/null +++ b/eng/common/sdl/extract-artifact-packages.ps1 @@ -0,0 +1,82 @@ +param( + [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where artifact packages are stored + [Parameter(Mandatory=$true)][string] $ExtractPath # Full path to directory where the packages will be extracted +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 + +$disableConfigureToolsetImport = $true + +function ExtractArtifacts { + if (!(Test-Path $InputPath)) { + Write-Host "Input Path does not exist: $InputPath" + ExitWithExitCode 0 + } + $Jobs = @() + Get-ChildItem "$InputPath\*.nupkg" | + ForEach-Object { + $Jobs += Start-Job -ScriptBlock $ExtractPackage -ArgumentList $_.FullName + } + + foreach ($Job in $Jobs) { + Wait-Job -Id $Job.Id | Receive-Job + } +} + +try { + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 + + $ExtractPackage = { + param( + [string] $PackagePath # Full path to a NuGet package + ) + + if (!(Test-Path $PackagePath)) { + Write-PipelineTelemetryError -Category 'Build' -Message "Input file does not exist: $PackagePath" + ExitWithExitCode 1 + } + + $RelevantExtensions = @('.dll', '.exe', '.pdb') + Write-Host -NoNewLine 'Extracting ' ([System.IO.Path]::GetFileName($PackagePath)) '...' 
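+    # Only entries with the extensions above are copied out. For example (illustrative),
+    # 'lib/net8.0/Foo.dll' inside 'Foo.1.0.0.nupkg' ends up at '<ExtractPath>\Foo.1.0.0\lib\net8.0\Foo.dll'.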
+ + $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath) + $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId + + Add-Type -AssemblyName System.IO.Compression.FileSystem + + [System.IO.Directory]::CreateDirectory($ExtractPath); + + try { + $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath) + + $zip.Entries | + Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} | + ForEach-Object { + $TargetPath = Join-Path -Path $ExtractPath -ChildPath (Split-Path -Path $_.FullName) + [System.IO.Directory]::CreateDirectory($TargetPath); + + $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.FullName + [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile) + } + } + catch { + Write-Host $_ + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 + } + finally { + $zip.Dispose() + } + } + Measure-Command { ExtractArtifacts } +} +catch { + Write-Host $_ + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} diff --git a/eng/common/sdl/init-sdl.ps1 b/eng/common/sdl/init-sdl.ps1 new file mode 100644 index 0000000..3ac1d92 --- /dev/null +++ b/eng/common/sdl/init-sdl.ps1 @@ -0,0 +1,55 @@ +Param( + [string] $GuardianCliLocation, + [string] $Repository, + [string] $BranchName='master', + [string] $WorkingDirectory, + [string] $AzureDevOpsAccessToken, + [string] $GuardianLoggerLevel='Standard' +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 +$disableConfigureToolsetImport = $true +$global:LASTEXITCODE = 0 + +# `tools.ps1` checks $ci to perform some actions. Since the SDL +# scripts don't necessarily execute in the same agent that run the +# build.ps1/sh script this variable isn't automatically set. +$ci = $true +. $PSScriptRoot\..\tools.ps1 + +# Don't display the console progress UI - it's a huge perf hit +$ProgressPreference = 'SilentlyContinue' + +# Construct basic auth from AzDO access token; construct URI to the repository's gdn folder stored in that repository; construct location of zip file +$encodedPat = [Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes(":$AzureDevOpsAccessToken")) +$escapedRepository = [Uri]::EscapeDataString("/$Repository/$BranchName/.gdn") +$uri = "https://dev.azure.com/dnceng/internal/_apis/git/repositories/sdl-tool-cfg/Items?path=$escapedRepository&versionDescriptor[versionOptions]=0&`$format=zip&api-version=5.0" +$zipFile = "$WorkingDirectory/gdn.zip" + +Add-Type -AssemblyName System.IO.Compression.FileSystem +$gdnFolder = (Join-Path $WorkingDirectory '.gdn') + +try { + # if the folder does not exist, we'll do a guardian init and push it to the remote repository + Write-Host 'Initializing Guardian...' + Write-Host "$GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel" + & $GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel + if ($LASTEXITCODE -ne 0) { + Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian init failed with exit code $LASTEXITCODE." 
+ ExitWithExitCode $LASTEXITCODE + } + # We create the mainbaseline so it can be edited later + Write-Host "$GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline" + & $GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline + if ($LASTEXITCODE -ne 0) { + Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian baseline failed with exit code $LASTEXITCODE." + ExitWithExitCode $LASTEXITCODE + } + ExitWithExitCode 0 +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} diff --git a/eng/common/sdl/packages.config b/eng/common/sdl/packages.config new file mode 100644 index 0000000..4585cfd --- /dev/null +++ b/eng/common/sdl/packages.config @@ -0,0 +1,4 @@ + + + + diff --git a/eng/common/sdl/push-gdn.ps1 b/eng/common/sdl/push-gdn.ps1 new file mode 100644 index 0000000..d8fd2d8 --- /dev/null +++ b/eng/common/sdl/push-gdn.ps1 @@ -0,0 +1,69 @@ +Param( + [string] $Repository, + [string] $BranchName='master', + [string] $GdnFolder, + [string] $AzureDevOpsAccessToken, + [string] $PushReason +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 +$disableConfigureToolsetImport = $true +$LASTEXITCODE = 0 + +try { + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 + + # We create the temp directory where we'll store the sdl-config repository + $sdlDir = Join-Path $env:TEMP 'sdl' + if (Test-Path $sdlDir) { + Remove-Item -Force -Recurse $sdlDir + } + + Write-Host "git clone https://dnceng:`$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir" + git clone https://dnceng:$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir + if ($LASTEXITCODE -ne 0) { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git clone failed with exit code $LASTEXITCODE." + ExitWithExitCode $LASTEXITCODE + } + # We copy the .gdn folder from our local run into the git repository so it can be committed + $sdlRepositoryFolder = Join-Path (Join-Path (Join-Path $sdlDir $Repository) $BranchName) '.gdn' + if (Get-Command Robocopy) { + Robocopy /S $GdnFolder $sdlRepositoryFolder + } else { + rsync -r $GdnFolder $sdlRepositoryFolder + } + # cd to the sdl-config directory so we can run git there + Push-Location $sdlDir + # git add . --> git commit --> git push + Write-Host 'git add .' + git add . + if ($LASTEXITCODE -ne 0) { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git add failed with exit code $LASTEXITCODE." + ExitWithExitCode $LASTEXITCODE + } + Write-Host "git -c user.email=`"dn-bot@microsoft.com`" -c user.name=`"Dotnet Bot`" commit -m `"$PushReason for $Repository/$BranchName`"" + git -c user.email="dn-bot@microsoft.com" -c user.name="Dotnet Bot" commit -m "$PushReason for $Repository/$BranchName" + if ($LASTEXITCODE -ne 0) { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git commit failed with exit code $LASTEXITCODE." + ExitWithExitCode $LASTEXITCODE + } + Write-Host 'git push' + git push + if ($LASTEXITCODE -ne 0) { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git push failed with exit code $LASTEXITCODE." 
+ ExitWithExitCode $LASTEXITCODE + } + + # Return to the original directory + Pop-Location +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} diff --git a/eng/common/sdl/run-sdl.ps1 b/eng/common/sdl/run-sdl.ps1 new file mode 100644 index 0000000..2eac8c7 --- /dev/null +++ b/eng/common/sdl/run-sdl.ps1 @@ -0,0 +1,49 @@ +Param( + [string] $GuardianCliLocation, + [string] $WorkingDirectory, + [string] $GdnFolder, + [string] $UpdateBaseline, + [string] $GuardianLoggerLevel='Standard' +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 +$disableConfigureToolsetImport = $true +$global:LASTEXITCODE = 0 + +try { + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 + + # We store config files in the r directory of .gdn + $gdnConfigPath = Join-Path $GdnFolder 'r' + $ValidPath = Test-Path $GuardianCliLocation + + if ($ValidPath -eq $False) + { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location." + ExitWithExitCode 1 + } + + $gdnConfigFiles = Get-ChildItem $gdnConfigPath -Recurse -Include '*.gdnconfig' + Write-Host "Discovered Guardian config files:" + $gdnConfigFiles | Out-String | Write-Host + + Exec-BlockVerbosely { + & $GuardianCliLocation run ` + --working-directory $WorkingDirectory ` + --baseline mainbaseline ` + --update-baseline $UpdateBaseline ` + --logger-level $GuardianLoggerLevel ` + --config @gdnConfigFiles + Exit-IfNZEC "Sdl" + } +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} diff --git a/eng/common/sdl/sdl.ps1 b/eng/common/sdl/sdl.ps1 new file mode 100644 index 0000000..648c506 --- /dev/null +++ b/eng/common/sdl/sdl.ps1 @@ -0,0 +1,38 @@ + +function Install-Gdn { + param( + [Parameter(Mandatory=$true)] + [string]$Path, + + # If omitted, install the latest version of Guardian, otherwise install that specific version. + [string]$Version + ) + + $ErrorActionPreference = 'Stop' + Set-StrictMode -Version 2.0 + $disableConfigureToolsetImport = $true + $global:LASTEXITCODE = 0 + + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 + + $argumentList = @("install", "Microsoft.Guardian.Cli", "-Source https://securitytools.pkgs.visualstudio.com/_packaging/Guardian/nuget/v3/index.json", "-OutputDirectory $Path", "-NonInteractive", "-NoCache") + + if ($Version) { + $argumentList += "-Version $Version" + } + + Start-Process nuget -Verbose -ArgumentList $argumentList -NoNewWindow -Wait + + $gdnCliPath = Get-ChildItem -Filter guardian.cmd -Recurse -Path $Path + + if (!$gdnCliPath) + { + Write-PipelineTelemetryError -Category 'Sdl' -Message 'Failure installing Guardian' + } + + return $gdnCliPath.FullName +} \ No newline at end of file diff --git a/eng/common/sdl/trim-assets-version.ps1 b/eng/common/sdl/trim-assets-version.ps1 new file mode 100644 index 0000000..a2e0048 --- /dev/null +++ b/eng/common/sdl/trim-assets-version.ps1 @@ -0,0 +1,75 @@ +<# +.SYNOPSIS +Install and run the 'Microsoft.DotNet.VersionTools.Cli' tool with the 'trim-artifacts-version' command to trim the version from the NuGet assets file name. 
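+
+.EXAMPLE
+.\trim-assets-version.ps1 -InputPath 'D:\a\1\artifacts\packages'
+Illustrative invocation; the path is a placeholder and -Recursive defaults to $true.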
+ +.PARAMETER InputPath +Full path to directory where artifact packages are stored + +.PARAMETER Recursive +Search for NuGet packages recursively + +#> + +Param( + [string] $InputPath, + [bool] $Recursive = $true +) + +$CliToolName = "Microsoft.DotNet.VersionTools.Cli" + +function Install-VersionTools-Cli { + param( + [Parameter(Mandatory=$true)][string]$Version + ) + + Write-Host "Installing the package '$CliToolName' with a version of '$version' ..." + $feed = "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json" + + $argumentList = @("tool", "install", "--local", "$CliToolName", "--add-source $feed", "--no-cache", "--version $Version", "--create-manifest-if-needed") + Start-Process "$dotnet" -Verbose -ArgumentList $argumentList -NoNewWindow -Wait +} + +# ------------------------------------------------------------------- + +if (!(Test-Path $InputPath)) { + Write-Host "Input Path '$InputPath' does not exist" + ExitWithExitCode 1 +} + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 + +$disableConfigureToolsetImport = $true +$global:LASTEXITCODE = 0 + +# `tools.ps1` checks $ci to perform some actions. Since the SDL +# scripts don't necessarily execute in the same agent that run the +# build.ps1/sh script this variable isn't automatically set. +$ci = $true +. $PSScriptRoot\..\tools.ps1 + +try { + $dotnetRoot = InitializeDotNetCli -install:$true + $dotnet = "$dotnetRoot\dotnet.exe" + + $toolsetVersion = Read-ArcadeSdkVersion + Install-VersionTools-Cli -Version $toolsetVersion + + $cliToolFound = (& "$dotnet" tool list --local | Where-Object {$_.Split(' ')[0] -eq $CliToolName}) + if ($null -eq $cliToolFound) { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "The '$CliToolName' tool is not installed." + ExitWithExitCode 1 + } + + Exec-BlockVerbosely { + & "$dotnet" $CliToolName trim-assets-version ` + --assets-path $InputPath ` + --recursive $Recursive + Exit-IfNZEC "Sdl" + } +} +catch { + Write-Host $_ + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} \ No newline at end of file diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml new file mode 100644 index 0000000..7870f93 --- /dev/null +++ b/eng/common/templates/job/execute-sdl.yml @@ -0,0 +1,139 @@ +parameters: + enable: 'false' # Whether the SDL validation job should execute or not + overrideParameters: '' # Optional: to override values for parameters. + additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")' + # Optional: if specified, restore and use this version of Guardian instead of the default. + overrideGuardianVersion: '' + # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth + # diagnosis of problems with specific tool configurations. + publishGuardianDirectoryToPipeline: false + # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL + # parameters rather than relying on YAML. It may be better to use a local script, because you can + # reproduce results locally without piecing together a command based on the YAML. + executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1' + # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named + # 'continueOnError', the parameter value is not correctly picked up. 
+ # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter + sdlContinueOnError: false # optional: determines whether to continue the build if the step errors; + # optional: determines if build artifacts should be downloaded. + downloadArtifacts: true + # optional: determines if this job should search the directory of downloaded artifacts for + # 'tar.gz' and 'zip' archive files and extract them before running SDL validation tasks. + extractArchiveArtifacts: false + dependsOn: '' # Optional: dependencies of the job + artifactNames: '' # Optional: patterns supplied to DownloadBuildArtifacts + # Usage: + # artifactNames: + # - 'BlobArtifacts' + # - 'Artifacts_Windows_NT_Release' + # Optional: download a list of pipeline artifacts. 'downloadArtifacts' controls build artifacts, + # not pipeline artifacts, so doesn't affect the use of this parameter. + pipelineArtifactNames: [] + +jobs: +- job: Run_SDL + dependsOn: ${{ parameters.dependsOn }} + displayName: Run SDL tool + condition: and(succeededOrFailed(), eq( ${{ parameters.enable }}, 'true')) + variables: + - group: DotNet-VSTS-Bot + - name: AzDOProjectName + value: ${{ parameters.AzDOProjectName }} + - name: AzDOPipelineId + value: ${{ parameters.AzDOPipelineId }} + - name: AzDOBuildId + value: ${{ parameters.AzDOBuildId }} + - template: /eng/common/templates/variables/sdl-variables.yml + - name: GuardianVersion + value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} + - template: /eng/common/templates/variables/pool-providers.yml + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: VSEngSS-MicroBuild2022-1ES + demands: Cmd + # If it's not devdiv, it's dnceng + ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64 + steps: + - checkout: self + clean: true + + # If the template caller didn't provide an AzDO parameter, set them all up as Maestro vars. 
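+  # (Illustrative: a caller that already knows which build to scan could instead pass
+  #  AzDOProjectName, AzDOPipelineId and AzDOBuildId directly, e.g. AzDOBuildId: 456789.)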
+ - ${{ if not(and(parameters.AzDOProjectName, parameters.AzDOPipelineId, parameters.AzDOBuildId)) }}: + - template: /eng/common/templates/post-build/setup-maestro-vars.yml + + - ${{ if ne(parameters.downloadArtifacts, 'false')}}: + - ${{ if ne(parameters.artifactNames, '') }}: + - ${{ each artifactName in parameters.artifactNames }}: + - task: DownloadBuildArtifacts@0 + displayName: Download Build Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: ${{ artifactName }} + downloadPath: $(Build.ArtifactStagingDirectory)\artifacts + checkDownloadedFiles: true + - ${{ if eq(parameters.artifactNames, '') }}: + - task: DownloadBuildArtifacts@0 + displayName: Download Build Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + downloadType: specific files + itemPattern: "**" + downloadPath: $(Build.ArtifactStagingDirectory)\artifacts + checkDownloadedFiles: true + + - ${{ each artifactName in parameters.pipelineArtifactNames }}: + - task: DownloadPipelineArtifact@2 + displayName: Download Pipeline Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: ${{ artifactName }} + downloadPath: $(Build.ArtifactStagingDirectory)\artifacts + checkDownloadedFiles: true + + - powershell: eng/common/sdl/trim-assets-version.ps1 + -InputPath $(Build.ArtifactStagingDirectory)\artifacts + displayName: Trim the version from the NuGet packages + continueOnError: ${{ parameters.sdlContinueOnError }} + + - powershell: eng/common/sdl/extract-artifact-packages.ps1 + -InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts + -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts + displayName: Extract Blob Artifacts + continueOnError: ${{ parameters.sdlContinueOnError }} + + - powershell: eng/common/sdl/extract-artifact-packages.ps1 + -InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts + -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts + displayName: Extract Package Artifacts + continueOnError: ${{ parameters.sdlContinueOnError }} + + - ${{ if ne(parameters.extractArchiveArtifacts, 'false') }}: + - powershell: eng/common/sdl/extract-artifact-archives.ps1 + -InputPath $(Build.ArtifactStagingDirectory)\artifacts + -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts + displayName: Extract Archive Artifacts + continueOnError: ${{ parameters.sdlContinueOnError }} + + - template: /eng/common/templates/steps/execute-sdl.yml + parameters: + overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }} + executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }} + overrideParameters: ${{ parameters.overrideParameters }} + additionalParameters: ${{ parameters.additionalParameters }} + publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }} + sdlContinueOnError: ${{ parameters.sdlContinueOnError }} diff --git a/eng/common/templates/job/generate-graph-files.yml b/eng/common/templates/job/generate-graph-files.yml new file mode 100644 index 0000000..e54ce95 --- /dev/null +++ b/eng/common/templates/job/generate-graph-files.yml @@ -0,0 +1,48 @@ +parameters: + # Optional: dependencies of the job + dependsOn: '' + + # Optional: A defined YAML pool - 
https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool + pool: {} + + # Optional: Include toolset dependencies in the generated graph files + includeToolset: false + +jobs: +- job: Generate_Graph_Files + + dependsOn: ${{ parameters.dependsOn }} + + displayName: Generate Graph Files + + pool: ${{ parameters.pool }} + + variables: + # Publish-Build-Assets provides: MaestroAccessToken, BotAccount-dotnet-maestro-bot-PAT + # DotNet-AllOrgs-Darc-Pats provides: dn-bot-devdiv-dnceng-rw-code-pat + - group: Publish-Build-Assets + - group: DotNet-AllOrgs-Darc-Pats + - name: _GraphArguments + value: -gitHubPat $(BotAccount-dotnet-maestro-bot-PAT) + -azdoPat $(dn-bot-devdiv-dnceng-rw-code-pat) + -barToken $(MaestroAccessToken) + -outputFolder '$(Build.StagingDirectory)/GraphFiles/' + - ${{ if ne(parameters.includeToolset, 'false') }}: + - name: _GraphArguments + value: ${{ variables._GraphArguments }} -includeToolset + + steps: + - task: PowerShell@2 + displayName: Generate Graph Files + inputs: + filePath: eng\common\generate-graph-files.ps1 + arguments: $(_GraphArguments) + continueOnError: true + - task: PublishBuildArtifacts@1 + displayName: Publish Graph to Artifacts + inputs: + PathtoPublish: '$(Build.StagingDirectory)/GraphFiles' + PublishLocation: Container + ArtifactName: GraphFiles + continueOnError: true + condition: always() diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml new file mode 100644 index 0000000..e20ee3a --- /dev/null +++ b/eng/common/templates/job/job.yml @@ -0,0 +1,255 @@ +# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, +# and some (Microbuild) should only be applied to non-PR cases for internal builds. + +parameters: +# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + cancelTimeoutInMinutes: '' + condition: '' + container: '' + continueOnError: false + dependsOn: '' + displayName: '' + pool: '' + steps: [] + strategy: '' + timeoutInMinutes: '' + variables: [] + workspace: '' + +# Job base template specific parameters + # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md + artifacts: '' + enableMicrobuild: false + enablePublishBuildArtifacts: false + enablePublishBuildAssets: false + enablePublishTestResults: false + enablePublishUsingPipelines: false + enableBuildRetry: false + disableComponentGovernance: '' + componentGovernanceIgnoreDirectories: '' + mergeTestResults: false + testRunTitle: '' + testResultsFormat: '' + name: '' + preSteps: [] + runAsPublic: false +# Sbom related params + enableSbom: true + PackageVersion: 7.0.0 + BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + +jobs: +- job: ${{ parameters.name }} + + ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}: + cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }} + + ${{ if ne(parameters.condition, '') }}: + condition: ${{ parameters.condition }} + + ${{ if ne(parameters.container, '') }}: + container: ${{ parameters.container }} + + ${{ if ne(parameters.continueOnError, '') }}: + continueOnError: ${{ parameters.continueOnError }} + + ${{ if ne(parameters.dependsOn, '') }}: + dependsOn: ${{ parameters.dependsOn }} + + ${{ if ne(parameters.displayName, '') }}: + displayName: ${{ parameters.displayName }} + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + + ${{ if ne(parameters.strategy, '') }}: + strategy: ${{ 
parameters.strategy }} + + ${{ if ne(parameters.timeoutInMinutes, '') }}: + timeoutInMinutes: ${{ parameters.timeoutInMinutes }} + + variables: + - ${{ if ne(parameters.enableTelemetry, 'false') }}: + - name: DOTNET_CLI_TELEMETRY_PROFILE + value: '$(Build.Repository.Uri)' + - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}: + - name: EnableRichCodeNavigation + value: 'true' + # Retry signature validation up to three times, waiting 2 seconds between attempts. + # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures + - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY + value: 3,2000 + - ${{ each variable in parameters.variables }}: + # handle name-value variable syntax + # example: + # - name: [key] + # value: [value] + - ${{ if ne(variable.name, '') }}: + - name: ${{ variable.name }} + value: ${{ variable.value }} + + # handle variable groups + - ${{ if ne(variable.group, '') }}: + - group: ${{ variable.group }} + + # handle template variable syntax + # example: + # - template: path/to/template.yml + # parameters: + # [key]: [value] + - ${{ if ne(variable.template, '') }}: + - template: ${{ variable.template }} + ${{ if ne(variable.parameters, '') }}: + parameters: ${{ variable.parameters }} + + # handle key-value variable syntax. + # example: + # - [key]: [value] + - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}: + - ${{ each pair in variable }}: + - name: ${{ pair.key }} + value: ${{ pair.value }} + + # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds + - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - group: DotNet-HelixApi-Access + + ${{ if ne(parameters.workspace, '') }}: + workspace: ${{ parameters.workspace }} + + steps: + - ${{ if ne(parameters.preSteps, '') }}: + - ${{ each preStep in parameters.preSteps }}: + - ${{ preStep }} + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + - task: MicroBuildSigningPlugin@3 + displayName: Install MicroBuild plugin + inputs: + signType: $(_SignType) + zipSources: false + feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json + env: + TeamName: $(_TeamName) + continueOnError: ${{ parameters.continueOnError }} + condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) + + - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}: + - task: NuGetAuthenticate@0 + + - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}: + - task: DownloadPipelineArtifact@2 + inputs: + buildType: current + artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }} + targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }} + itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }} + + - ${{ each step in parameters.steps }}: + - ${{ step }} + + - ${{ if eq(parameters.enableRichCodeNavigation, true) }}: + - task: RichCodeNavIndexer@0 + displayName: RichCodeNav Upload + inputs: + languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }} + environment: ${{ 
coalesce(parameters.richCodeNavigationEnvironment, 'production') }} + richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin + uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }} + continueOnError: true + + - template: /eng/common/templates/steps/component-governance.yml + parameters: + ${{ if eq(parameters.disableComponentGovernance, '') }}: + ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}: + disableComponentGovernance: false + ${{ else }}: + disableComponentGovernance: true + ${{ else }}: + disableComponentGovernance: ${{ parameters.disableComponentGovernance }} + componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} + + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - task: MicroBuildCleanup@1 + displayName: Execute Microbuild cleanup tasks + condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} + env: + TeamName: $(_TeamName) + + - ${{ if ne(parameters.artifacts.publish, '') }}: + - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: + - task: CopyFiles@2 + displayName: Gather binaries for publish to artifacts + inputs: + SourceFolder: 'artifacts/bin' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin' + - task: CopyFiles@2 + displayName: Gather packages for publish to artifacts + inputs: + SourceFolder: 'artifacts/packages' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages' + - task: PublishBuildArtifacts@1 + displayName: Publish pipeline artifacts + inputs: + PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts' + PublishLocation: Container + ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} + continueOnError: true + condition: always() + - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: + - publish: artifacts/log + artifact: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }} + displayName: Publish logs + continueOnError: true + condition: always() + + - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}: + - task: PublishBuildArtifacts@1 + displayName: Publish Logs + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)' + PublishLocation: Container + ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }} + continueOnError: true + condition: always() + + - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}: + - task: PublishTestResults@2 + displayName: Publish XUnit Test Results + inputs: + testResultsFormat: 'xUnit' + testResultsFiles: '*.xml' + searchFolder: 
'$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' + testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit + mergeTestResults: ${{ parameters.mergeTestResults }} + continueOnError: true + condition: always() + - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}: + - task: PublishTestResults@2 + displayName: Publish TRX Test Results + inputs: + testResultsFormat: 'VSTest' + testResultsFiles: '*.trx' + searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' + testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx + mergeTestResults: ${{ parameters.mergeTestResults }} + continueOnError: true + condition: always() + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}: + - template: /eng/common/templates/steps/generate-sbom.yml + parameters: + PackageVersion: ${{ parameters.packageVersion}} + BuildDropPath: ${{ parameters.buildDropPath }} + IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} + + - ${{ if eq(parameters.enableBuildRetry, 'true') }}: + - publish: $(Build.SourcesDirectory)\eng\common\BuildConfiguration + artifact: BuildConfiguration + displayName: Publish build retry configuration + continueOnError: true diff --git a/eng/common/templates/job/onelocbuild.yml b/eng/common/templates/job/onelocbuild.yml new file mode 100644 index 0000000..60ab00c --- /dev/null +++ b/eng/common/templates/job/onelocbuild.yml @@ -0,0 +1,109 @@ +parameters: + # Optional: dependencies of the job + dependsOn: '' + + # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool + pool: '' + + CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex + GithubPat: $(BotAccount-dotnet-bot-repo-PAT) + + SourcesDirectory: $(Build.SourcesDirectory) + CreatePr: true + AutoCompletePr: false + ReusePr: true + UseLfLineEndings: true + UseCheckedInLocProjectJson: false + SkipLocProjectJsonGeneration: false + LanguageSet: VS_Main_Languages + LclSource: lclFilesInRepo + LclPackageId: '' + RepoType: gitHub + GitHubOrg: dotnet + MirrorRepo: '' + MirrorBranch: main + condition: '' + JobNameSuffix: '' + +jobs: +- job: OneLocBuild${{ parameters.JobNameSuffix }} + + dependsOn: ${{ parameters.dependsOn }} + + displayName: OneLocBuild${{ parameters.JobNameSuffix }} + + variables: + - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat + - name: _GenerateLocProjectArguments + value: -SourcesDirectory ${{ parameters.SourcesDirectory }} + -LanguageSet "${{ parameters.LanguageSet }}" + -CreateNeutralXlfs + - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}: + - name: _GenerateLocProjectArguments + value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson + - template: /eng/common/templates/variables/pool-providers.yml + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + ${{ if eq(parameters.pool, '') }}: + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: VSEngSS-MicroBuild2022-1ES + demands: Cmd + # If it's not devdiv, it's dnceng + ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64 + + steps: + - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}: + - task: Powershell@2 + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1 + arguments: $(_GenerateLocProjectArguments) + displayName: Generate LocProject.json + condition: ${{ parameters.condition }} + + - task: OneLocBuild@2 + displayName: OneLocBuild + env: + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + inputs: + locProj: eng/Localize/LocProject.json + outDir: $(Build.ArtifactStagingDirectory) + lclSource: ${{ parameters.LclSource }} + lclPackageId: ${{ parameters.LclPackageId }} + isCreatePrSelected: ${{ parameters.CreatePr }} + isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }} + ${{ if eq(parameters.CreatePr, true) }}: + isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }} + ${{ if eq(parameters.RepoType, 'gitHub') }}: + isShouldReusePrSelected: ${{ parameters.ReusePr }} + packageSourceAuth: patAuth + patVariable: ${{ parameters.CeapexPat }} + ${{ if eq(parameters.RepoType, 'gitHub') }}: + repoType: ${{ parameters.RepoType }} + gitHubPatVariable: "${{ parameters.GithubPat }}" + ${{ if ne(parameters.MirrorRepo, '') }}: + isMirrorRepoSelected: true + gitHubOrganization: ${{ parameters.GitHubOrg }} + mirrorRepo: ${{ parameters.MirrorRepo }} + mirrorBranch: ${{ parameters.MirrorBranch }} + condition: ${{ parameters.condition }} + + - task: PublishBuildArtifacts@1 + displayName: Publish Localization Files + inputs: + PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc' + PublishLocation: Container + ArtifactName: Loc + condition: ${{ parameters.condition }} + + - task: PublishBuildArtifacts@1 + displayName: Publish LocProject.json + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/' + PublishLocation: Container + ArtifactName: Loc + condition: ${{ parameters.condition }} \ No newline at end of file diff --git a/eng/common/templates/job/performance.yml b/eng/common/templates/job/performance.yml new file mode 100644 index 0000000..f877fd7 --- /dev/null +++ b/eng/common/templates/job/performance.yml @@ -0,0 +1,95 @@ +parameters: + steps: [] # optional -- any additional steps that need to happen before pulling down the performance repo and sending the performance benchmarks to helix (ie building your repo) + variables: [] # optional -- list of additional variables to send to the template + jobName: '' # required -- job name + displayName: '' # optional -- display name for the job. 
Will use jobName if not passed + pool: '' # required -- name of the Build pool + container: '' # required -- name of the container + osGroup: '' # required -- operating system for the job + extraSetupParameters: '' # optional -- extra arguments to pass to the setup script + frameworks: ['netcoreapp3.0'] # optional -- list of frameworks to run against + continueOnError: 'false' # optional -- determines whether to continue the build if the step errors + dependsOn: '' # optional -- dependencies of the job + timeoutInMinutes: 320 # optional -- timeout for the job + enableTelemetry: false # optional -- enable for telemetry + +jobs: +- template: ../jobs/jobs.yml + parameters: + dependsOn: ${{ parameters.dependsOn }} + enableTelemetry: ${{ parameters.enableTelemetry }} + enablePublishBuildArtifacts: true + continueOnError: ${{ parameters.continueOnError }} + + jobs: + - job: '${{ parameters.jobName }}' + + ${{ if ne(parameters.displayName, '') }}: + displayName: '${{ parameters.displayName }}' + ${{ if eq(parameters.displayName, '') }}: + displayName: '${{ parameters.jobName }}' + + timeoutInMinutes: ${{ parameters.timeoutInMinutes }} + + variables: + + - ${{ each variable in parameters.variables }}: + - ${{ if ne(variable.name, '') }}: + - name: ${{ variable.name }} + value: ${{ variable.value }} + - ${{ if ne(variable.group, '') }}: + - group: ${{ variable.group }} + + - IsInternal: '' + - HelixApiAccessToken: '' + - HelixPreCommand: '' + + - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - ${{ if eq( parameters.osGroup, 'Windows_NT') }}: + - HelixPreCommand: 'set "PERFLAB_UPLOAD_TOKEN=$(PerfCommandUploadToken)"' + - IsInternal: -Internal + - ${{ if ne(parameters.osGroup, 'Windows_NT') }}: + - HelixPreCommand: 'export PERFLAB_UPLOAD_TOKEN="$(PerfCommandUploadTokenLinux)"' + - IsInternal: --internal + + - group: DotNet-HelixApi-Access + - group: dotnet-benchview + + workspace: + clean: all + pool: + ${{ parameters.pool }} + container: ${{ parameters.container }} + strategy: + matrix: + ${{ each framework in parameters.frameworks }}: + ${{ framework }}: + _Framework: ${{ framework }} + steps: + - checkout: self + clean: true + # Run all of the steps to setup repo + - ${{ each step in parameters.steps }}: + - ${{ step }} + - powershell: $(Build.SourcesDirectory)\eng\common\performance\performance-setup.ps1 $(IsInternal) -Framework $(_Framework) ${{ parameters.extraSetupParameters }} + displayName: Performance Setup (Windows) + condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} + - script: $(Build.SourcesDirectory)/eng/common/performance/performance-setup.sh $(IsInternal) --framework $(_Framework) ${{ parameters.extraSetupParameters }} + displayName: Performance Setup (Unix) + condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} + - script: $(Python) $(PerformanceDirectory)/scripts/ci_setup.py $(SetupArguments) + displayName: Run ci setup script + # Run perf testing in helix + - template: /eng/common/templates/steps/perf-send-to-helix.yml + parameters: + HelixSource: '$(HelixSourcePrefix)/$(Build.Repository.Name)/$(Build.SourceBranch)' # sources must start with pr/, official/, prodcon/, or agent/ + HelixType: 'test/performance/$(Kind)/$(_Framework)/$(Architecture)' + HelixAccessToken: $(HelixApiAccessToken) + HelixTargetQueues: $(Queue) + HelixPreCommands: $(HelixPreCommand) + Creator: $(Creator) 
+ WorkItemTimeout: 4:00 # 4 hours + WorkItemDirectory: '$(WorkItemDirectory)' # WorkItemDirectory can not be empty, so we send it some docs to keep it happy + CorrelationPayloadDirectory: '$(PayloadDirectory)' # it gets checked out to a folder with shorter path than WorkItemDirectory so we can avoid file name too long exceptions \ No newline at end of file diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml new file mode 100644 index 0000000..4201710 --- /dev/null +++ b/eng/common/templates/job/publish-build-assets.yml @@ -0,0 +1,151 @@ +parameters: + configuration: 'Debug' + + # Optional: condition for the job to run + condition: '' + + # Optional: 'true' if future jobs should run even if this job fails + continueOnError: false + + # Optional: dependencies of the job + dependsOn: '' + + # Optional: Include PublishBuildArtifacts task + enablePublishBuildArtifacts: false + + # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool + pool: {} + + # Optional: should run as a public build even in the internal project + # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. + runAsPublic: false + + # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing + publishUsingPipelines: false + + # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing + publishAssetsImmediately: false + + artifactsPublishingAdditionalParameters: '' + + signingValidationAdditionalParameters: '' + +jobs: +- job: Asset_Registry_Publish + + dependsOn: ${{ parameters.dependsOn }} + timeoutInMinutes: 150 + + ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: + displayName: Publish Assets + ${{ else }}: + displayName: Publish to Build Asset Registry + + variables: + - template: /eng/common/templates/variables/pool-providers.yml + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - group: Publish-Build-Assets + - group: AzureDevOps-Artifact-Feeds-Pats + - name: runCodesignValidationInjection + value: false + - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: + - template: /eng/common/templates/post-build/common-variables.yml + + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: VSEngSS-MicroBuild2022-1ES + demands: Cmd + # If it's not devdiv, it's dnceng + ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64 + + steps: + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - task: DownloadBuildArtifacts@0 + displayName: Download artifact + inputs: + artifactName: AssetManifests + downloadPath: '$(Build.StagingDirectory)/Download' + checkDownloadedFiles: true + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} + + - task: NuGetAuthenticate@0 + + - task: PowerShell@2 + displayName: Publish Build Assets + inputs: + filePath: eng\common\sdk-task.ps1 + arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet + /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests' + /p:BuildAssetRegistryToken=$(MaestroAccessToken) + /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com + /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }} + /p:OfficialBuildId=$(Build.BuildNumber) + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} + + - task: powershell@2 + displayName: Create ReleaseConfigs Artifact + inputs: + targetType: inline + script: | + Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId) + Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)" + Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild) + + - task: PublishBuildArtifacts@1 + displayName: Publish ReleaseConfigs Artifact + inputs: + PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt' + PublishLocation: Container + ArtifactName: ReleaseConfigs + + - task: powershell@2 + displayName: Check if SymbolPublishingExclusionsFile.txt exists + inputs: + targetType: inline + script: | + $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt" + if(Test-Path -Path $symbolExclusionfile) + { + Write-Host "SymbolExclusionFile exists" + Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true" + } + else{ + Write-Host "Symbols Exclusion file does not exists" + Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false" + } + + - task: PublishBuildArtifacts@1 + displayName: Publish SymbolPublishingExclusionsFile Artifact + condition: eq(variables['SymbolExclusionFile'], 'true') + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt' + PublishLocation: Container + ArtifactName: ReleaseConfigs + + - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: + - template: /eng/common/templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: PowerShell@2 + displayName: Publish Using Darc + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 + arguments: -BuildId $(BARBuildId) + -PublishingInfraVersion 3 + -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' + -MaestroToken '$(MaestroApiAccessToken)' + -WaitPublishingFinish true + -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' + -SymbolPublishingAdditionalParameters '${{ 
parameters.symbolPublishingAdditionalParameters }}' + + - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: + - template: /eng/common/templates/steps/publish-logs.yml + parameters: + JobLabel: 'Publish_Artifacts_Logs' diff --git a/eng/common/templates/job/source-build.yml b/eng/common/templates/job/source-build.yml new file mode 100644 index 0000000..8a3deef --- /dev/null +++ b/eng/common/templates/job/source-build.yml @@ -0,0 +1,66 @@ +parameters: + # This template adds arcade-powered source-build to CI. The template produces a server job with a + # default ID 'Source_Build_Complete' to put in a dependency list if necessary. + + # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed. + jobNamePrefix: 'Source_Build' + + # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for + # managed-only repositories. This is an object with these properties: + # + # name: '' + # The name of the job. This is included in the job ID. + # targetRID: '' + # The name of the target RID to use, instead of the one auto-detected by Arcade. + # nonPortable: false + # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than + # linux-x64), and compiling against distro-provided packages rather than portable ones. + # skipPublishValidation: false + # Disables publishing validation. By default, a check is performed to ensure no packages are + # published by source-build. + # container: '' + # A container to use. Runs in docker. + # pool: {} + # A pool to use. Runs directly on an agent. + # buildScript: '' + # Specifies the build script to invoke to perform the build in the repo. The default + # './build.sh' should work for typical Arcade repositories, but this is customizable for + # difficult situations. + # jobProperties: {} + # A list of job properties to inject at the top level, for potential extensibility beyond + # container and pool. + platform: {} + +jobs: +- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }} + displayName: Source-Build (${{ parameters.platform.name }}) + + ${{ each property in parameters.platform.jobProperties }}: + ${{ property.key }}: ${{ property.value }} + + ${{ if ne(parameters.platform.container, '') }}: + container: ${{ parameters.platform.container }} + + ${{ if eq(parameters.platform.pool, '') }}: + # The default VM host AzDO pool. This should be capable of running Docker containers: almost all + # source-build builds run in Docker, including the default managed platform. 
+ # /eng/common/templates/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic + pool: + ${{ if eq(variables['System.TeamProject'], 'public') }}: + name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')] + demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open + + ${{ if eq(variables['System.TeamProject'], 'internal') }}: + name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')] + demands: ImageOverride -equals Build.Ubuntu.1804.Amd64 + + ${{ if ne(parameters.platform.pool, '') }}: + pool: ${{ parameters.platform.pool }} + + workspace: + clean: all + + steps: + - template: /eng/common/templates/steps/source-build.yml + parameters: + platform: ${{ parameters.platform }} diff --git a/eng/common/templates/job/source-index-stage1.yml b/eng/common/templates/job/source-index-stage1.yml new file mode 100644 index 0000000..b98202a --- /dev/null +++ b/eng/common/templates/job/source-index-stage1.yml @@ -0,0 +1,67 @@ +parameters: + runAsPublic: false + sourceIndexPackageVersion: 1.0.1-20230228.2 + sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json + sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci" + preSteps: [] + binlogPath: artifacts/log/Debug/Build.binlog + condition: '' + dependsOn: '' + pool: '' + +jobs: +- job: SourceIndexStage1 + dependsOn: ${{ parameters.dependsOn }} + condition: ${{ parameters.condition }} + variables: + - name: SourceIndexPackageVersion + value: ${{ parameters.sourceIndexPackageVersion }} + - name: SourceIndexPackageSource + value: ${{ parameters.sourceIndexPackageSource }} + - name: BinlogPath + value: ${{ parameters.binlogPath }} + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - group: source-dot-net stage1 variables + - template: /eng/common/templates/variables/pool-providers.yml + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + ${{ if eq(parameters.pool, '') }}: + pool: + ${{ if eq(variables['System.TeamProject'], 'public') }}: + name: $(DncEngPublicBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64.open + ${{ if eq(variables['System.TeamProject'], 'internal') }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64 + + steps: + - ${{ each preStep in parameters.preSteps }}: + - ${{ preStep }} + + - task: UseDotNet@2 + displayName: Use .NET Core SDK 6 + inputs: + packageType: sdk + version: 6.0.x + installationPath: $(Agent.TempDirectory)/dotnet + workingDirectory: $(Agent.TempDirectory) + + - script: | + $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools + $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools + displayName: Download Tools + # Set 
working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk. + workingDirectory: $(Agent.TempDirectory) + + - script: ${{ parameters.sourceIndexBuildCommand }} + displayName: Build Repository + + - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output + displayName: Process Binlog into indexable sln + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) + displayName: Upload stage1 artifacts to source index + env: + BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url) diff --git a/eng/common/templates/jobs/codeql-build.yml b/eng/common/templates/jobs/codeql-build.yml new file mode 100644 index 0000000..f7dc5ea --- /dev/null +++ b/eng/common/templates/jobs/codeql-build.yml @@ -0,0 +1,31 @@ +parameters: + # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md + continueOnError: false + # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + jobs: [] + # Optional: if specified, restore and use this version of Guardian instead of the default. + overrideGuardianVersion: '' + +jobs: +- template: /eng/common/templates/jobs/jobs.yml + parameters: + enableMicrobuild: false + enablePublishBuildArtifacts: false + enablePublishTestResults: false + enablePublishBuildAssets: false + enablePublishUsingPipelines: false + enableTelemetry: true + + variables: + - group: Publish-Build-Assets + # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in + # sync with the packages.config file. + - name: DefaultGuardianVersion + value: 0.109.0 + - name: GuardianPackagesConfigFile + value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config + - name: GuardianVersion + value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} + + jobs: ${{ parameters.jobs }} + diff --git a/eng/common/templates/jobs/jobs.yml b/eng/common/templates/jobs/jobs.yml new file mode 100644 index 0000000..289bb23 --- /dev/null +++ b/eng/common/templates/jobs/jobs.yml @@ -0,0 +1,97 @@ +parameters: + # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md + continueOnError: false + + # Optional: Include PublishBuildArtifacts task + enablePublishBuildArtifacts: false + + # Optional: Enable publishing using release pipelines + enablePublishUsingPipelines: false + + # Optional: Enable running the source-build jobs to build repo from source + enableSourceBuild: false + + # Optional: Parameters for source-build template. 
+ # See /eng/common/templates/jobs/source-build.yml for options + sourceBuildParameters: [] + + graphFileGeneration: + # Optional: Enable generating the graph files at the end of the build + enabled: false + # Optional: Include toolset dependencies in the generated graph files + includeToolset: false + + # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + jobs: [] + + # Optional: Override automatically derived dependsOn value for "publish build assets" job + publishBuildAssetsDependsOn: '' + + # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage. + publishAssetsImmediately: false + + # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml) + artifactsPublishingAdditionalParameters: '' + signingValidationAdditionalParameters: '' + + # Optional: should run as a public build even in the internal project + # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. + runAsPublic: false + + enableSourceIndex: false + sourceIndexParams: {} + +# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, +# and some (Microbuild) should only be applied to non-PR cases for internal builds. + +jobs: +- ${{ each job in parameters.jobs }}: + - template: ../job/job.yml + parameters: + # pass along parameters + ${{ each parameter in parameters }}: + ${{ if ne(parameter.key, 'jobs') }}: + ${{ parameter.key }}: ${{ parameter.value }} + + # pass along job properties + ${{ each property in job }}: + ${{ if ne(property.key, 'job') }}: + ${{ property.key }}: ${{ property.value }} + + name: ${{ job.job }} + +- ${{ if eq(parameters.enableSourceBuild, true) }}: + - template: /eng/common/templates/jobs/source-build.yml + parameters: + allCompletedJobId: Source_Build_Complete + ${{ each parameter in parameters.sourceBuildParameters }}: + ${{ parameter.key }}: ${{ parameter.value }} + +- ${{ if eq(parameters.enableSourceIndex, 'true') }}: + - template: ../job/source-index-stage1.yml + parameters: + runAsPublic: ${{ parameters.runAsPublic }} + ${{ each parameter in parameters.sourceIndexParams }}: + ${{ parameter.key }}: ${{ parameter.value }} + +- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}: + - template: ../job/publish-build-assets.yml + parameters: + continueOnError: ${{ parameters.continueOnError }} + dependsOn: + - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}: + - ${{ each job in parameters.publishBuildAssetsDependsOn }}: + - ${{ job.job }} + - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}: + - ${{ each job in parameters.jobs }}: + - ${{ job.job }} + - ${{ if eq(parameters.enableSourceBuild, true) }}: + - Source_Build_Complete + + runAsPublic: ${{ parameters.runAsPublic }} + publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }} + publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }} + enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} + 
signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }} diff --git a/eng/common/templates/jobs/source-build.yml b/eng/common/templates/jobs/source-build.yml new file mode 100644 index 0000000..a15b07e --- /dev/null +++ b/eng/common/templates/jobs/source-build.yml @@ -0,0 +1,46 @@ +parameters: + # This template adds arcade-powered source-build to CI. A job is created for each platform, as + # well as an optional server job that completes when all platform jobs complete. + + # The name of the "join" job for all source-build platforms. If set to empty string, the job is + # not included. Existing repo pipelines can use this job depend on all source-build jobs + # completing without maintaining a separate list of every single job ID: just depend on this one + # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'. + allCompletedJobId: '' + + # See /eng/common/templates/job/source-build.yml + jobNamePrefix: 'Source_Build' + + # This is the default platform provided by Arcade, intended for use by a managed-only repo. + defaultManagedPlatform: + name: 'Managed' + container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8' + + # Defines the platforms on which to run build jobs. One job is created for each platform, and the + # object in this array is sent to the job template as 'platform'. If no platforms are specified, + # one job runs on 'defaultManagedPlatform'. + platforms: [] + +jobs: + +- ${{ if ne(parameters.allCompletedJobId, '') }}: + - job: ${{ parameters.allCompletedJobId }} + displayName: Source-Build Complete + pool: server + dependsOn: + - ${{ each platform in parameters.platforms }}: + - ${{ parameters.jobNamePrefix }}_${{ platform.name }} + - ${{ if eq(length(parameters.platforms), 0) }}: + - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }} + +- ${{ each platform in parameters.platforms }}: + - template: /eng/common/templates/job/source-build.yml + parameters: + jobNamePrefix: ${{ parameters.jobNamePrefix }} + platform: ${{ platform }} + +- ${{ if eq(length(parameters.platforms), 0) }}: + - template: /eng/common/templates/job/source-build.yml + parameters: + jobNamePrefix: ${{ parameters.jobNamePrefix }} + platform: ${{ parameters.defaultManagedPlatform }} diff --git a/eng/common/templates/phases/base.yml b/eng/common/templates/phases/base.yml new file mode 100644 index 0000000..0123cf4 --- /dev/null +++ b/eng/common/templates/phases/base.yml @@ -0,0 +1,130 @@ +parameters: + # Optional: Clean sources before building + clean: true + + # Optional: Git fetch depth + fetchDepth: '' + + # Optional: name of the phase (not specifying phase name may cause name collisions) + name: '' + # Optional: display name of the phase + displayName: '' + + # Optional: condition for the job to run + condition: '' + + # Optional: dependencies of the phase + dependsOn: '' + + # Required: A defined YAML queue + queue: {} + + # Required: build steps + steps: [] + + # Optional: variables + variables: {} + + # Optional: should run as a public build even in the internal project + # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. 
+ runAsPublic: false + + ## Telemetry variables + + # Optional: enable sending telemetry + # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix + # _HelixBuildConfig - differentiate between Debug, Release, other + # _HelixSource - Example: build/product + # _HelixType - Example: official/dotnet/arcade/$(Build.SourceBranch) + enableTelemetry: false + + # Optional: Enable installing Microbuild plugin + # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix + # _TeamName - the name of your team + # _SignType - 'test' or 'real' + enableMicrobuild: false + +# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, +# and some (Microbuild) should only be applied to non-PR cases for internal builds. + +phases: +- phase: ${{ parameters.name }} + + ${{ if ne(parameters.displayName, '') }}: + displayName: ${{ parameters.displayName }} + + ${{ if ne(parameters.condition, '') }}: + condition: ${{ parameters.condition }} + + ${{ if ne(parameters.dependsOn, '') }}: + dependsOn: ${{ parameters.dependsOn }} + + queue: ${{ parameters.queue }} + + ${{ if ne(parameters.variables, '') }}: + variables: + ${{ insert }}: ${{ parameters.variables }} + + steps: + - checkout: self + clean: ${{ parameters.clean }} + ${{ if ne(parameters.fetchDepth, '') }}: + fetchDepth: ${{ parameters.fetchDepth }} + + - ${{ if eq(parameters.enableTelemetry, 'true') }}: + - template: /eng/common/templates/steps/telemetry-start.yml + parameters: + buildConfig: $(_HelixBuildConfig) + helixSource: $(_HelixSource) + helixType: $(_HelixType) + runAsPublic: ${{ parameters.runAsPublic }} + + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + # Internal only resource, and Microbuild signing shouldn't be applied to PRs. 
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - task: MicroBuildSigningPlugin@2 + displayName: Install MicroBuild plugin + inputs: + signType: $(_SignType) + zipSources: false + feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json + + env: + TeamName: $(_TeamName) + continueOnError: false + condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) + + # Run provided build steps + - ${{ parameters.steps }} + + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + # Internal only resources + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - task: MicroBuildCleanup@1 + displayName: Execute Microbuild cleanup tasks + condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) + env: + TeamName: $(_TeamName) + + - ${{ if eq(parameters.enableTelemetry, 'true') }}: + - template: /eng/common/templates/steps/telemetry-end.yml + parameters: + helixSource: $(_HelixSource) + helixType: $(_HelixType) + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - task: CopyFiles@2 + displayName: Gather Asset Manifests + inputs: + SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest' + TargetFolder: '$(Build.StagingDirectory)/AssetManifests' + continueOnError: false + condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true')) + - task: PublishBuildArtifacts@1 + displayName: Push Asset Manifests + inputs: + PathtoPublish: '$(Build.StagingDirectory)/AssetManifests' + PublishLocation: Container + ArtifactName: AssetManifests + continueOnError: false + condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true')) diff --git a/eng/common/templates/phases/publish-build-assets.yml b/eng/common/templates/phases/publish-build-assets.yml new file mode 100644 index 0000000..4e51e47 --- /dev/null +++ b/eng/common/templates/phases/publish-build-assets.yml @@ -0,0 +1,52 @@ +parameters: + dependsOn: '' + queue: {} + configuration: 'Debug' + condition: succeeded() + continueOnError: false + runAsPublic: false + publishUsingPipelines: false +phases: + - phase: Asset_Registry_Publish + displayName: Publish to Build Asset Registry + dependsOn: ${{ parameters.dependsOn }} + queue: ${{ parameters.queue }} + variables: + _BuildConfig: ${{ parameters.configuration }} + steps: + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - task: DownloadBuildArtifacts@0 + displayName: Download artifact + inputs: + artifactName: AssetManifests + downloadPath: '$(Build.StagingDirectory)/Download' + checkDownloadedFiles: true + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} + - task: AzureKeyVault@1 + inputs: + azureSubscription: 'DotNet-Engineering-Services_KeyVault' + KeyVaultName: EngKeyVault + SecretsFilter: 'MaestroAccessToken' + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} + - task: PowerShell@2 + displayName: Publish Build Assets + inputs: + filePath: eng\common\sdk-task.ps1 + arguments: -task PublishBuildAssets -restore 
-msbuildEngine dotnet + /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests' + /p:BuildAssetRegistryToken=$(MaestroAccessToken) + /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com + /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }} + /p:Configuration=$(_BuildConfig) + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} + - task: PublishBuildArtifacts@1 + displayName: Publish Logs to VSTS + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)' + PublishLocation: Container + ArtifactName: $(Agent.Os)_Asset_Registry_Publish + continueOnError: true + condition: always() diff --git a/eng/common/templates/post-build/channels/generic-internal-channel.yml b/eng/common/templates/post-build/channels/generic-internal-channel.yml new file mode 100644 index 0000000..8990dfc --- /dev/null +++ b/eng/common/templates/post-build/channels/generic-internal-channel.yml @@ -0,0 +1,190 @@ +parameters: + BARBuildId: '' + PromoteToChannelIds: '' + artifactsPublishingAdditionalParameters: '' + dependsOn: + - Validate + publishInstallersAndChecksums: true + symbolPublishingAdditionalParameters: '' + stageName: '' + channelName: '' + channelId: '' + transportFeed: '' + shippingFeed: '' + symbolsFeed: '' + +stages: +- stage: ${{ parameters.stageName }} + dependsOn: ${{ parameters.dependsOn }} + variables: + - template: ../common-variables.yml + displayName: ${{ parameters.channelName }} Publishing + jobs: + - template: ../setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - job: publish_symbols + displayName: Symbol Publishing + dependsOn: setupMaestroVars + condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'], format('[{0}]', ${{ parameters.channelId }} )) + variables: + - group: DotNet-Symbol-Server-Pats + - name: AzDOProjectName + value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ] + - name: AzDOPipelineId + value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ] + - name: AzDOBuildId + value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ] + pool: + vmImage: 'windows-2019' + steps: + - script: echo "##vso[task.logissue type=warning]Going forward, v2 Arcade publishing is no longer supported. Please read https://github.com/dotnet/arcade/blob/main/Documentation/CorePackages/Publishing.md for details, then contact dnceng if you have further questions." + displayName: Warn about v2 Arcade Publishing Usage + + # This is necessary whenever we want to publish/restore to an AzDO private feed + - task: NuGetAuthenticate@0 + displayName: 'Authenticate to AzDO Feeds' + + - task: DownloadBuildArtifacts@0 + displayName: Download Build Assets + continueOnError: true + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + downloadType: 'specific' + itemPattern: | + PdbArtifacts/** + BlobArtifacts/** + downloadPath: '$(Build.ArtifactStagingDirectory)' + checkDownloadedFiles: true + + # This is necessary whenever we want to publish/restore to an AzDO private feed + # Since sdk-task.ps1 tries to restore packages we need to do this authentication here + # otherwise it'll complain about accessing a private feed. 
+ - task: NuGetAuthenticate@0 + displayName: 'Authenticate to AzDO Feeds' + + - task: PowerShell@2 + displayName: Enable cross-org publishing + inputs: + filePath: eng\common\enable-cross-org-publishing.ps1 + arguments: -token $(dn-bot-dnceng-artifact-feeds-rw) + + - task: PowerShell@2 + displayName: Publish + inputs: + filePath: eng\common\sdk-task.ps1 + arguments: -task PublishToSymbolServers -restore -msbuildEngine dotnet + /p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat) + /p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat) + /p:PDBArtifactsDirectory='$(Build.ArtifactStagingDirectory)/PDBArtifacts/' + /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/' + /p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt' + /p:Configuration=Release + /p:PublishToMSDL=false + ${{ parameters.symbolPublishingAdditionalParameters }} + + - template: ../../steps/publish-logs.yml + parameters: + StageLabel: '${{ parameters.stageName }}' + JobLabel: 'SymbolPublishing' + + - job: publish_assets + displayName: Publish Assets + dependsOn: setupMaestroVars + timeoutInMinutes: 120 + variables: + - name: BARBuildId + value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ] + - name: IsStableBuild + value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ] + - name: AzDOProjectName + value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ] + - name: AzDOPipelineId + value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ] + - name: AzDOBuildId + value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ] + condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'], format('[{0}]', ${{ parameters.channelId }} )) + pool: + vmImage: 'windows-2019' + steps: + - script: echo "##vso[task.logissue type=warning]Going forward, v2 Arcade publishing is no longer supported. Please read https://github.com/dotnet/arcade/blob/main/Documentation/CorePackages/Publishing.md for details, then contact dnceng if you have further questions." 
+ displayName: Warn about v2 Arcade Publishing Usage + + - task: DownloadBuildArtifacts@0 + displayName: Download Build Assets + continueOnError: true + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + downloadType: 'specific' + itemPattern: | + PackageArtifacts/** + BlobArtifacts/** + AssetManifests/** + downloadPath: '$(Build.ArtifactStagingDirectory)' + checkDownloadedFiles: true + + - task: NuGetToolInstaller@1 + displayName: 'Install NuGet.exe' + + # This is necessary whenever we want to publish/restore to an AzDO private feed + - task: NuGetAuthenticate@0 + displayName: 'Authenticate to AzDO Feeds' + + - task: PowerShell@2 + displayName: Enable cross-org publishing + inputs: + filePath: eng\common\enable-cross-org-publishing.ps1 + arguments: -token $(dn-bot-dnceng-artifact-feeds-rw) + + - task: PowerShell@2 + displayName: Publish Assets + inputs: + filePath: eng\common\sdk-task.ps1 + arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet + /p:PublishingInfraVersion=2 + /p:IsStableBuild=$(IsStableBuild) + /p:IsInternalBuild=$(IsInternalBuild) + /p:RepositoryName=$(Build.Repository.Name) + /p:CommitSha=$(Build.SourceVersion) + /p:NugetPath=$(NuGetExeToolPath) + /p:AzdoTargetFeedPAT='$(dn-bot-dnceng-universal-packages-rw)' + /p:AzureStorageTargetFeedPAT='$(dotnetfeed-storage-access-key-1)' + /p:BARBuildId=$(BARBuildId) + /p:MaestroApiEndpoint='$(MaestroApiEndPoint)' + /p:BuildAssetRegistryToken='$(MaestroApiAccessToken)' + /p:ManifestsBasePath='$(Build.ArtifactStagingDirectory)/AssetManifests/' + /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/' + /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts/' + /p:Configuration=Release + /p:PublishInstallersAndChecksums=${{ parameters.publishInstallersAndChecksums }} + /p:ChecksumsTargetStaticFeed=$(InternalChecksumsBlobFeedUrl) + /p:ChecksumsAzureAccountKey=$(InternalChecksumsBlobFeedKey) + /p:InstallersTargetStaticFeed=$(InternalInstallersBlobFeedUrl) + /p:InstallersAzureAccountKey=$(InternalInstallersBlobFeedKey) + /p:AzureDevOpsStaticShippingFeed='${{ parameters.shippingFeed }}' + /p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)' + /p:AzureDevOpsStaticTransportFeed='${{ parameters.transportFeed }}' + /p:AzureDevOpsStaticTransportFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)' + /p:AzureDevOpsStaticSymbolsFeed='${{ parameters.symbolsFeed }}' + /p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)' + /p:PublishToMSDL=false + ${{ parameters.artifactsPublishingAdditionalParameters }} + + - template: ../../steps/publish-logs.yml + parameters: + StageLabel: '${{ parameters.stageName }}' + JobLabel: 'AssetsPublishing' + + - template: ../../steps/add-build-to-channel.yml + parameters: + ChannelId: ${{ parameters.channelId }} diff --git a/eng/common/templates/post-build/channels/generic-public-channel.yml b/eng/common/templates/post-build/channels/generic-public-channel.yml new file mode 100644 index 0000000..3220c6a --- /dev/null +++ b/eng/common/templates/post-build/channels/generic-public-channel.yml @@ -0,0 +1,192 @@ +parameters: + BARBuildId: '' + PromoteToChannelIds: '' + artifactsPublishingAdditionalParameters: '' + dependsOn: + - Validate + publishInstallersAndChecksums: true + symbolPublishingAdditionalParameters: '' + stageName: '' + channelName: '' + channelId: '' + transportFeed: '' + shippingFeed: '' + symbolsFeed: '' + # If the channel name is 
empty, no links will be generated + akaMSChannelName: '' + +stages: +- stage: ${{ parameters.stageName }} + dependsOn: ${{ parameters.dependsOn }} + variables: + - template: ../common-variables.yml + displayName: ${{ parameters.channelName }} Publishing + jobs: + - template: ../setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - job: publish_symbols + displayName: Symbol Publishing + dependsOn: setupMaestroVars + condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'], format('[{0}]', ${{ parameters.channelId }} )) + variables: + - group: DotNet-Symbol-Server-Pats + - name: AzDOProjectName + value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ] + - name: AzDOPipelineId + value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ] + - name: AzDOBuildId + value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ] + pool: + vmImage: 'windows-2019' + steps: + - script: echo "##vso[task.logissue type=warning]Going forward, v2 Arcade publishing is no longer supported. Please read https://github.com/dotnet/arcade/blob/main/Documentation/CorePackages/Publishing.md for details, then contact dnceng if you have further questions." + displayName: Warn about v2 Arcade Publishing Usage + + - task: DownloadBuildArtifacts@0 + displayName: Download Build Assets + continueOnError: true + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + downloadType: 'specific' + itemPattern: | + PdbArtifacts/** + BlobArtifacts/** + downloadPath: '$(Build.ArtifactStagingDirectory)' + checkDownloadedFiles: true + + # This is necessary whenever we want to publish/restore to an AzDO private feed + # Since sdk-task.ps1 tries to restore packages we need to do this authentication here + # otherwise it'll complain about accessing a private feed. 
+ - task: NuGetAuthenticate@0 + displayName: 'Authenticate to AzDO Feeds' + + - task: PowerShell@2 + displayName: Enable cross-org publishing + inputs: + filePath: eng\common\enable-cross-org-publishing.ps1 + arguments: -token $(dn-bot-dnceng-artifact-feeds-rw) + + - task: PowerShell@2 + displayName: Publish + inputs: + filePath: eng\common\sdk-task.ps1 + arguments: -task PublishToSymbolServers -restore -msbuildEngine dotnet + /p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat) + /p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat) + /p:PDBArtifactsDirectory='$(Build.ArtifactStagingDirectory)/PDBArtifacts/' + /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/' + /p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt' + /p:Configuration=Release + ${{ parameters.symbolPublishingAdditionalParameters }} + + - template: ../../steps/publish-logs.yml + parameters: + StageLabel: '${{ parameters.stageName }}' + JobLabel: 'SymbolPublishing' + + - job: publish_assets + displayName: Publish Assets + dependsOn: setupMaestroVars + timeoutInMinutes: 120 + variables: + - name: BARBuildId + value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ] + - name: IsStableBuild + value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ] + - name: AzDOProjectName + value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ] + - name: AzDOPipelineId + value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ] + - name: AzDOBuildId + value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ] + - name: ArtifactsCategory + value: ${{ coalesce(variables._DotNetArtifactsCategory, '.NETCore') }} + condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'], format('[{0}]', ${{ parameters.channelId }} )) + pool: + vmImage: 'windows-2019' + steps: + - script: echo "##vso[task.logissue type=warning]Going forward, v2 Arcade publishing is no longer supported. Please read https://github.com/dotnet/arcade/blob/main/Documentation/CorePackages/Publishing.md for details, then contact dnceng if you have further questions." 
+ displayName: Warn about v2 Arcade Publishing Usage + + - task: DownloadBuildArtifacts@0 + displayName: Download Build Assets + continueOnError: true + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + downloadType: 'specific' + itemPattern: | + PackageArtifacts/** + BlobArtifacts/** + AssetManifests/** + downloadPath: '$(Build.ArtifactStagingDirectory)' + checkDownloadedFiles: true + + - task: NuGetToolInstaller@1 + displayName: 'Install NuGet.exe' + + # This is necessary whenever we want to publish/restore to an AzDO private feed + - task: NuGetAuthenticate@0 + displayName: 'Authenticate to AzDO Feeds' + + - task: PowerShell@2 + displayName: Enable cross-org publishing + inputs: + filePath: eng\common\enable-cross-org-publishing.ps1 + arguments: -token $(dn-bot-dnceng-artifact-feeds-rw) + + - task: PowerShell@2 + displayName: Publish Assets + inputs: + filePath: eng\common\sdk-task.ps1 + arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet + /p:PublishingInfraVersion=2 + /p:ArtifactsCategory=$(ArtifactsCategory) + /p:IsStableBuild=$(IsStableBuild) + /p:IsInternalBuild=$(IsInternalBuild) + /p:RepositoryName=$(Build.Repository.Name) + /p:CommitSha=$(Build.SourceVersion) + /p:NugetPath=$(NuGetExeToolPath) + /p:AzdoTargetFeedPAT='$(dn-bot-dnceng-universal-packages-rw)' + /p:AzureStorageTargetFeedPAT='$(dotnetfeed-storage-access-key-1)' + /p:BARBuildId=$(BARBuildId) + /p:MaestroApiEndpoint='$(MaestroApiEndPoint)' + /p:BuildAssetRegistryToken='$(MaestroApiAccessToken)' + /p:ManifestsBasePath='$(Build.ArtifactStagingDirectory)/AssetManifests/' + /p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/' + /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts/' + /p:Configuration=Release + /p:PublishInstallersAndChecksums=${{ parameters.publishInstallersAndChecksums }} + /p:InstallersTargetStaticFeed=$(InstallersBlobFeedUrl) + /p:InstallersAzureAccountKey=$(dotnetcli-storage-key) + /p:ChecksumsTargetStaticFeed=$(ChecksumsBlobFeedUrl) + /p:ChecksumsAzureAccountKey=$(dotnetclichecksums-storage-key) + /p:AzureDevOpsStaticShippingFeed='${{ parameters.shippingFeed }}' + /p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)' + /p:AzureDevOpsStaticTransportFeed='${{ parameters.transportFeed }}' + /p:AzureDevOpsStaticTransportFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)' + /p:AzureDevOpsStaticSymbolsFeed='${{ parameters.symbolsFeed }}' + /p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)' + /p:LatestLinkShortUrlPrefix=dotnet/'${{ parameters.akaMSChannelName }}' + /p:AkaMSClientId=$(akams-client-id) + /p:AkaMSClientSecret=$(akams-client-secret) + ${{ parameters.artifactsPublishingAdditionalParameters }} + + - template: ../../steps/publish-logs.yml + parameters: + StageLabel: '${{ parameters.stageName }}' + JobLabel: 'AssetsPublishing' + + - template: ../../steps/add-build-to-channel.yml + parameters: + ChannelId: ${{ parameters.channelId }} diff --git a/eng/common/templates/post-build/common-variables.yml b/eng/common/templates/post-build/common-variables.yml new file mode 100644 index 0000000..c24193a --- /dev/null +++ b/eng/common/templates/post-build/common-variables.yml @@ -0,0 +1,22 @@ +variables: + - group: Publish-Build-Assets + + # Whether the build is internal or not + - name: IsInternalBuild + value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) 
}} + + # Default Maestro++ API Endpoint and API Version + - name: MaestroApiEndPoint + value: "https://maestro-prod.westus2.cloudapp.azure.com" + - name: MaestroApiAccessToken + value: $(MaestroAccessToken) + - name: MaestroApiVersion + value: "2020-02-20" + + - name: SourceLinkCLIVersion + value: 3.0.0 + - name: SymbolToolVersion + value: 1.0.1 + + - name: runCodesignValidationInjection + value: false diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml new file mode 100644 index 0000000..ef720f9 --- /dev/null +++ b/eng/common/templates/post-build/post-build.yml @@ -0,0 +1,281 @@ +parameters: + # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST. + # Publishing V1 is no longer supported + # Publishing V2 is no longer supported + # Publishing V3 is the default + - name: publishingInfraVersion + displayName: Which version of publishing should be used to promote the build definition? + type: number + default: 3 + values: + - 3 + + - name: BARBuildId + displayName: BAR Build Id + type: number + default: 0 + + - name: PromoteToChannelIds + displayName: Channel to promote BARBuildId to + type: string + default: '' + + - name: enableSourceLinkValidation + displayName: Enable SourceLink validation + type: boolean + default: false + + - name: enableSigningValidation + displayName: Enable signing validation + type: boolean + default: true + + - name: enableSymbolValidation + displayName: Enable symbol validation + type: boolean + default: false + + - name: enableNugetValidation + displayName: Enable NuGet validation + type: boolean + default: true + + - name: publishInstallersAndChecksums + displayName: Publish installers and checksums + type: boolean + default: true + + - name: SDLValidationParameters + type: object + default: + enable: false + publishGdn: false + continueOnError: false + params: '' + artifactNames: '' + downloadArtifacts: true + + # These parameters let the user customize the call to sdk-task.ps1 for publishing + # symbols & general artifacts as well as for signing validation + - name: symbolPublishingAdditionalParameters + displayName: Symbol publishing additional parameters + type: string + default: '' + + - name: artifactsPublishingAdditionalParameters + displayName: Artifact publishing additional parameters + type: string + default: '' + + - name: signingValidationAdditionalParameters + displayName: Signing validation additional parameters + type: string + default: '' + + # Which stages should finish execution before post-build stages start + - name: validateDependsOn + type: object + default: + - build + + - name: publishDependsOn + type: object + default: + - Validate + + # Optional: Call asset publishing rather than running in a separate stage + - name: publishAssetsImmediately + type: boolean + default: false + +stages: +- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: + - stage: Validate + dependsOn: ${{ parameters.validateDependsOn }} + displayName: Validate Build Assets + variables: + - template: common-variables.yml + - template: /eng/common/templates/variables/pool-providers.yml + jobs: + - job: + displayName: NuGet Validation + condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true')) + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com 
vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: VSEngSS-MicroBuild2022-1ES + demands: Cmd + # If it's not devdiv, it's dnceng + ${{ else }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64 + + steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: PackageArtifacts + checkDownloadedFiles: true + + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1 + arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ + -ToolDestinationPath $(Agent.BuildDirectory)/Extract/ + + - job: + displayName: Signing Validation + condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true')) + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: VSEngSS-MicroBuild2022-1ES + demands: Cmd + # If it's not devdiv, it's dnceng + ${{ else }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64 + steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: PackageArtifacts + checkDownloadedFiles: true + itemPattern: | + ** + !**/Microsoft.SourceBuild.Intermediate.*.nupkg + + # This is necessary whenever we want to publish/restore to an AzDO private feed + # Since sdk-task.ps1 tries to restore packages we need to do this authentication here + # otherwise it'll complain about accessing a private feed. + - task: NuGetAuthenticate@0 + displayName: 'Authenticate to AzDO Feeds' + + # Signing validation will optionally work with the buildmanifest file which is downloaded from + # Azure DevOps above. + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: eng\common\sdk-task.ps1 + arguments: -task SigningValidation -restore -msbuildEngine vs + /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' + /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt' + ${{ parameters.signingValidationAdditionalParameters }} + + - template: ../steps/publish-logs.yml + parameters: + StageLabel: 'Validation' + JobLabel: 'Signing' + + - job: + displayName: SourceLink Validation + condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true') + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: VSEngSS-MicroBuild2022-1ES + demands: Cmd + # If it's not devdiv, it's dnceng + ${{ else }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64 + steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Blob Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: BlobArtifacts + checkDownloadedFiles: true + + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1 + arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ + -ExtractPath $(Agent.BuildDirectory)/Extract/ + -GHRepoName $(Build.Repository.Name) + -GHCommit $(Build.SourceVersion) + -SourcelinkCliVersion $(SourceLinkCLIVersion) + continueOnError: true + + - template: /eng/common/templates/job/execute-sdl.yml + parameters: + enable: ${{ parameters.SDLValidationParameters.enable }} + publishGuardianDirectoryToPipeline: ${{ parameters.SDLValidationParameters.publishGdn }} + additionalParameters: ${{ parameters.SDLValidationParameters.params }} + continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }} + artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }} + downloadArtifacts: ${{ parameters.SDLValidationParameters.downloadArtifacts }} + +- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}: + - stage: publish_using_darc + ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: + dependsOn: ${{ parameters.publishDependsOn }} + ${{ else }}: + dependsOn: ${{ parameters.validateDependsOn }} + displayName: Publish using Darc + variables: + - template: common-variables.yml + - template: /eng/common/templates/variables/pool-providers.yml + jobs: + - job: + displayName: Publish Using Darc + timeoutInMinutes: 120 + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: VSEngSS-MicroBuild2022-1ES + demands: Cmd + # If it's not devdiv, it's dnceng + ${{ else }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2019.amd64 + steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: NuGetAuthenticate@0 + + - task: PowerShell@2 + displayName: Publish Using Darc + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 + arguments: -BuildId $(BARBuildId) + -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} + -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' + -MaestroToken '$(MaestroApiAccessToken)' + -WaitPublishingFinish true + -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' + -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' diff --git a/eng/common/templates/post-build/setup-maestro-vars.yml b/eng/common/templates/post-build/setup-maestro-vars.yml new file mode 100644 index 0000000..0c87f14 --- /dev/null +++ b/eng/common/templates/post-build/setup-maestro-vars.yml @@ -0,0 +1,70 @@ +parameters: + BARBuildId: '' + PromoteToChannelIds: '' + +steps: + - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}: + - task: DownloadBuildArtifacts@0 + displayName: Download Release Configs + inputs: + buildType: current + artifactName: ReleaseConfigs + checkDownloadedFiles: true + + - task: PowerShell@2 + name: setReleaseVars + displayName: Set Release Configs Vars + inputs: + targetType: inline + pwsh: true + script: | + try { + if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') { + $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt + + $BarId = $Content | Select -Index 0 + $Channels = $Content | Select -Index 1 + $IsStableBuild = $Content | Select -Index 2 + + $AzureDevOpsProject = $Env:System_TeamProject + $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId + $AzureDevOpsBuildId = $Env:Build_BuildId + } + else { + $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}" + + $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]' + $apiHeaders.Add('Accept', 'application/json') + $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}") + + $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" } + + $BarId = $Env:BARBuildId + $Channels = $Env:PromoteToMaestroChannels -split "," + $Channels = $Channels -join "][" + $Channels = "[$Channels]" + + $IsStableBuild = $buildInfo.stable + $AzureDevOpsProject = $buildInfo.azureDevOpsProject + $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId + $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId + } + + Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId" + Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels" + Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild" + + Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject" + Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId" + Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId" + } + catch { + 
Write-Host $_ + Write-Host $_.Exception + Write-Host $_.ScriptStackTrace + exit 1 + } + env: + MAESTRO_API_TOKEN: $(MaestroApiAccessToken) + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }} diff --git a/eng/common/templates/post-build/trigger-subscription.yml b/eng/common/templates/post-build/trigger-subscription.yml new file mode 100644 index 0000000..da66903 --- /dev/null +++ b/eng/common/templates/post-build/trigger-subscription.yml @@ -0,0 +1,13 @@ +parameters: + ChannelId: 0 + +steps: +- task: PowerShell@2 + displayName: Triggering subscriptions + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1 + arguments: -SourceRepo $(Build.Repository.Uri) + -ChannelId ${{ parameters.ChannelId }} + -MaestroApiAccessToken $(MaestroAccessToken) + -MaestroApiEndPoint $(MaestroApiEndPoint) + -MaestroApiVersion $(MaestroApiVersion) diff --git a/eng/common/templates/steps/add-build-to-channel.yml b/eng/common/templates/steps/add-build-to-channel.yml new file mode 100644 index 0000000..f67a210 --- /dev/null +++ b/eng/common/templates/steps/add-build-to-channel.yml @@ -0,0 +1,13 @@ +parameters: + ChannelId: 0 + +steps: +- task: PowerShell@2 + displayName: Add Build to Channel + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1 + arguments: -BuildId $(BARBuildId) + -ChannelId ${{ parameters.ChannelId }} + -MaestroApiAccessToken $(MaestroApiAccessToken) + -MaestroApiEndPoint $(MaestroApiEndPoint) + -MaestroApiVersion $(MaestroApiVersion) diff --git a/eng/common/templates/steps/build-reason.yml b/eng/common/templates/steps/build-reason.yml new file mode 100644 index 0000000..eba5810 --- /dev/null +++ b/eng/common/templates/steps/build-reason.yml @@ -0,0 +1,12 @@ +# build-reason.yml +# Description: runs steps if build.reason condition is valid. conditions is a string of valid build reasons +# to include steps (',' separated). +parameters: + conditions: '' + steps: [] + +steps: + - ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}: + - ${{ parameters.steps }} + - ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}: + - ${{ parameters.steps }} diff --git a/eng/common/templates/steps/component-governance.yml b/eng/common/templates/steps/component-governance.yml new file mode 100644 index 0000000..0ecec47 --- /dev/null +++ b/eng/common/templates/steps/component-governance.yml @@ -0,0 +1,13 @@ +parameters: + disableComponentGovernance: false + componentGovernanceIgnoreDirectories: '' + +steps: +- ${{ if eq(parameters.disableComponentGovernance, 'true') }}: + - script: "echo ##vso[task.setvariable variable=skipComponentGovernanceDetection]true" + displayName: Set skipComponentGovernanceDetection variable +- ${{ if ne(parameters.disableComponentGovernance, 'true') }}: + - task: ComponentGovernanceComponentDetection@0 + continueOnError: true + inputs: + ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} \ No newline at end of file diff --git a/eng/common/templates/steps/execute-codeql.yml b/eng/common/templates/steps/execute-codeql.yml new file mode 100644 index 0000000..3930b16 --- /dev/null +++ b/eng/common/templates/steps/execute-codeql.yml @@ -0,0 +1,32 @@ +parameters: + # Language that should be analyzed. 
Defaults to csharp + language: csharp + # Build Commands + buildCommands: '' + overrideParameters: '' # Optional: to override values for parameters. + additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")' + # Optional: if specified, restore and use this version of Guardian instead of the default. + overrideGuardianVersion: '' + # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth + # diagnosis of problems with specific tool configurations. + publishGuardianDirectoryToPipeline: false + # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL + # parameters rather than relying on YAML. It may be better to use a local script, because you can + # reproduce results locally without piecing together a command based on the YAML. + executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1' + # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named + # 'continueOnError', the parameter value is not correctly picked up. + # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter + # optional: determines whether to continue the build if the step errors; + sdlContinueOnError: false + +steps: +- template: /eng/common/templates/steps/execute-sdl.yml + parameters: + overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }} + executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }} + overrideParameters: ${{ parameters.overrideParameters }} + additionalParameters: '${{ parameters.additionalParameters }} + -CodeQLAdditionalRunConfigParams @("BuildCommands < ${{ parameters.buildCommands }}", "Language < ${{ parameters.language }}")' + publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }} + sdlContinueOnError: ${{ parameters.sdlContinueOnError }} \ No newline at end of file diff --git a/eng/common/templates/steps/execute-sdl.yml b/eng/common/templates/steps/execute-sdl.yml new file mode 100644 index 0000000..07426fd --- /dev/null +++ b/eng/common/templates/steps/execute-sdl.yml @@ -0,0 +1,88 @@ +parameters: + overrideGuardianVersion: '' + executeAllSdlToolsScript: '' + overrideParameters: '' + additionalParameters: '' + publishGuardianDirectoryToPipeline: false + sdlContinueOnError: false + condition: '' + +steps: +- task: NuGetAuthenticate@1 + inputs: + nuGetServiceConnections: GuardianConnect + +- task: NuGetToolInstaller@1 + displayName: 'Install NuGet.exe' + +- ${{ if ne(parameters.overrideGuardianVersion, '') }}: + - pwsh: | + Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl + . .\sdl.ps1 + $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }} + Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation" + displayName: Install Guardian (Overridden) + +- ${{ if eq(parameters.overrideGuardianVersion, '') }}: + - pwsh: | + Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl + . 
.\sdl.ps1 + $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts + Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation" + displayName: Install Guardian + +- ${{ if ne(parameters.overrideParameters, '') }}: + - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }} + displayName: Execute SDL (Overridden) + continueOnError: ${{ parameters.sdlContinueOnError }} + condition: ${{ parameters.condition }} + +- ${{ if eq(parameters.overrideParameters, '') }}: + - powershell: ${{ parameters.executeAllSdlToolsScript }} + -GuardianCliLocation $(GuardianCliLocation) + -NugetPackageDirectory $(Build.SourcesDirectory)\.packages + -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw) + ${{ parameters.additionalParameters }} + displayName: Execute SDL + continueOnError: ${{ parameters.sdlContinueOnError }} + condition: ${{ parameters.condition }} + +- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}: + # We want to publish the Guardian results and configuration for easy diagnosis. However, the + # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default + # tooling files. Some of these files are large and aren't useful during an investigation, so + # exclude them by simply deleting them before publishing. (As of writing, there is no documented + # way to selectively exclude a dir from the pipeline artifact publish task.) + - task: DeleteFiles@1 + displayName: Delete Guardian dependencies to avoid uploading + inputs: + SourceFolder: $(Agent.BuildDirectory)/.gdn + Contents: | + c + i + condition: succeededOrFailed() + + - publish: $(Agent.BuildDirectory)/.gdn + artifact: GuardianConfiguration + displayName: Publish GuardianConfiguration + condition: succeededOrFailed() + + # Publish the SARIF files in a container named CodeAnalysisLogs to enable integration + # with the "SARIF SAST Scans Tab" Azure DevOps extension + - task: CopyFiles@2 + displayName: Copy SARIF files + inputs: + flattenFolders: true + sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/ + contents: '**/*.sarif' + targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs + condition: succeededOrFailed() + + # Use PublishBuildArtifacts because the SARIF extension only checks this case + # see microsoft/sarif-azuredevops-extension#4 + - task: PublishBuildArtifacts@1 + displayName: Publish SARIF files to CodeAnalysisLogs container + inputs: + pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs + artifactName: CodeAnalysisLogs + condition: succeededOrFailed() \ No newline at end of file diff --git a/eng/common/templates/steps/generate-sbom.yml b/eng/common/templates/steps/generate-sbom.yml new file mode 100644 index 0000000..a06373f --- /dev/null +++ b/eng/common/templates/steps/generate-sbom.yml @@ -0,0 +1,48 @@ +# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated. +# PackageName - The name of the package this SBOM represents. +# PackageVersion - The version of the package this SBOM represents. +# ManifestDirPath - The path of the directory where the generated manifest files will be placed +# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector. 
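+#
+# Illustrative caller sketch (hedged example, not part of this template: only the template path
+# and the parameter names come from this file; the IgnoreDirectories value is a hypothetical
+# placeholder, the other values mirror the defaults declared below):
+#
+#   steps:
+#   - template: /eng/common/templates/steps/generate-sbom.yml
+#     parameters:
+#       PackageName: '.NET'
+#       PackageVersion: 7.0.0
+#       BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+#       ManifestDirPath: '$(Build.ArtifactStagingDirectory)/sbom'
+#       IgnoreDirectories: '$(Build.SourcesDirectory)/artifacts/obj'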
+ +parameters: + PackageVersion: 7.0.0 + BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + PackageName: '.NET' + ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom + IgnoreDirectories: '' + sbomContinueOnError: true + +steps: +- task: PowerShell@2 + displayName: Prep for SBOM generation in (Non-linux) + condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin')) + inputs: + filePath: ./eng/common/generate-sbom-prep.ps1 + arguments: ${{parameters.manifestDirPath}} + +# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461 +- script: | + chmod +x ./eng/common/generate-sbom-prep.sh + ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}} + displayName: Prep for SBOM generation in (Linux) + condition: eq(variables['Agent.Os'], 'Linux') + continueOnError: ${{ parameters.sbomContinueOnError }} + +- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0 + displayName: 'Generate SBOM manifest' + continueOnError: ${{ parameters.sbomContinueOnError }} + inputs: + PackageName: ${{ parameters.packageName }} + BuildDropPath: ${{ parameters.buildDropPath }} + PackageVersion: ${{ parameters.packageVersion }} + ManifestDirPath: ${{ parameters.manifestDirPath }} + ${{ if ne(parameters.IgnoreDirectories, '') }}: + AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}' + +- task: PublishPipelineArtifact@1 + displayName: Publish SBOM manifest + continueOnError: ${{parameters.sbomContinueOnError}} + inputs: + targetPath: '${{parameters.manifestDirPath}}' + artifactName: $(ARTIFACT_NAME) + diff --git a/eng/common/templates/steps/perf-send-to-helix.yml b/eng/common/templates/steps/perf-send-to-helix.yml new file mode 100644 index 0000000..a468e92 --- /dev/null +++ b/eng/common/templates/steps/perf-send-to-helix.yml @@ -0,0 +1,50 @@ +# Please remember to update the documentation if you make changes to these parameters! 
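+#
+# Illustrative caller sketch (hedged: the queue name, variable name and osGroup value are hypothetical
+# placeholders; the template path, the project file name and the parameter names come from this
+# repository -- see the parameter list that follows for the full set of options):
+#
+#   steps:
+#   - template: /eng/common/templates/steps/perf-send-to-helix.yml
+#     parameters:
+#       ProjectFile: 'microbenchmarks.proj'
+#       HelixTargetQueues: 'Windows.10.Amd64.Open'
+#       HelixAccessToken: $(HelixApiAccessToken)
+#       osGroup: 'windows'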
+parameters: + ProjectFile: '' # required -- project file that specifies the helix workitems + HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/ + HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/' + HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number + HelixTargetQueues: '' # required -- semicolon delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues + HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group + HelixPreCommands: '' # optional -- commands to run before Helix work item execution + HelixPostCommands: '' # optional -- commands to run after Helix work item execution + WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects + CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload + IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion + DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json + DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json + EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control + WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget." 
+ Creator: '' # optional -- if the build is external, use this to specify who is sending the job + DisplayNamePrefix: 'Send job to Helix' # optional -- rename the beginning of the displayName of the steps in AzDO + condition: succeeded() # optional -- condition for step to execute; defaults to succeeded() + continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false + osGroup: '' # required -- operating system for the job + + +steps: +- template: /eng/pipelines/common/templates/runtimes/send-to-helix-inner-step.yml + parameters: + osGroup: ${{ parameters.osGroup }} + sendParams: $(Build.SourcesDirectory)/eng/common/performance/${{ parameters.ProjectFile }} /restore /t:Test /bl:$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/SendToHelix.binlog + displayName: ${{ parameters.DisplayNamePrefix }} + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} + environment: + BuildConfig: $(_BuildConfig) + HelixSource: ${{ parameters.HelixSource }} + HelixType: ${{ parameters.HelixType }} + HelixBuild: ${{ parameters.HelixBuild }} + HelixTargetQueues: ${{ parameters.HelixTargetQueues }} + HelixAccessToken: ${{ parameters.HelixAccessToken }} + HelixPreCommands: ${{ parameters.HelixPreCommands }} + HelixPostCommands: ${{ parameters.HelixPostCommands }} + WorkItemDirectory: ${{ parameters.WorkItemDirectory }} + CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} + IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} + DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} + DotNetCliVersion: ${{ parameters.DotNetCliVersion }} + EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }} + WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} + Creator: ${{ parameters.Creator }} + SYSTEM_ACCESSTOKEN: $(System.AccessToken) diff --git a/eng/common/templates/steps/publish-logs.yml b/eng/common/templates/steps/publish-logs.yml new file mode 100644 index 0000000..88f238f --- /dev/null +++ b/eng/common/templates/steps/publish-logs.yml @@ -0,0 +1,23 @@ +parameters: + StageLabel: '' + JobLabel: '' + +steps: +- task: Powershell@2 + displayName: Prepare Binlogs to Upload + inputs: + targetType: inline + script: | + New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ + Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ + continueOnError: true + condition: always() + +- task: PublishBuildArtifacts@1 + displayName: Publish Logs + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs' + PublishLocation: Container + ArtifactName: PostBuildLogs + continueOnError: true + condition: always() diff --git a/eng/common/templates/steps/retain-build.yml b/eng/common/templates/steps/retain-build.yml new file mode 100644 index 0000000..83d97a2 --- /dev/null +++ b/eng/common/templates/steps/retain-build.yml @@ -0,0 +1,28 @@ +parameters: + # Optional azure devops PAT with build execute permissions for the build's organization, + # only needed if the build that should be retained ran on a different organization than + # the pipeline where this template is executing from + Token: '' + # Optional BuildId to retain, defaults to the current running build + BuildId: '' + # Azure devops Organization URI for the build in the https://dev.azure.com/ format. 
+ # Defaults to the organization the current pipeline is running on + AzdoOrgUri: '$(System.CollectionUri)' + # Azure devops project for the build. Defaults to the project the current pipeline is running on + AzdoProject: '$(System.TeamProject)' + +steps: + - task: powershell@2 + inputs: + targetType: 'filePath' + filePath: eng/common/retain-build.ps1 + pwsh: true + arguments: > + -AzdoOrgUri: ${{parameters.AzdoOrgUri}} + -AzdoProject ${{parameters.AzdoProject}} + -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }} + -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}} + displayName: Enable permanent build retention + env: + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + BUILD_ID: $(Build.BuildId) \ No newline at end of file diff --git a/eng/common/templates/steps/run-on-unix.yml b/eng/common/templates/steps/run-on-unix.yml new file mode 100644 index 0000000..e173381 --- /dev/null +++ b/eng/common/templates/steps/run-on-unix.yml @@ -0,0 +1,7 @@ +parameters: + agentOs: '' + steps: [] + +steps: +- ${{ if ne(parameters.agentOs, 'Windows_NT') }}: + - ${{ parameters.steps }} diff --git a/eng/common/templates/steps/run-on-windows.yml b/eng/common/templates/steps/run-on-windows.yml new file mode 100644 index 0000000..73e7e9c --- /dev/null +++ b/eng/common/templates/steps/run-on-windows.yml @@ -0,0 +1,7 @@ +parameters: + agentOs: '' + steps: [] + +steps: +- ${{ if eq(parameters.agentOs, 'Windows_NT') }}: + - ${{ parameters.steps }} diff --git a/eng/common/templates/steps/run-script-ifequalelse.yml b/eng/common/templates/steps/run-script-ifequalelse.yml new file mode 100644 index 0000000..3d1242f --- /dev/null +++ b/eng/common/templates/steps/run-script-ifequalelse.yml @@ -0,0 +1,33 @@ +parameters: + # if parameter1 equals parameter 2, run 'ifScript' command, else run 'elsescript' command + parameter1: '' + parameter2: '' + ifScript: '' + elseScript: '' + + # name of script step + name: Script + + # display name of script step + displayName: If-Equal-Else Script + + # environment + env: {} + + # conditional expression for step execution + condition: '' + +steps: +- ${{ if and(ne(parameters.ifScript, ''), eq(parameters.parameter1, parameters.parameter2)) }}: + - script: ${{ parameters.ifScript }} + name: ${{ parameters.name }} + displayName: ${{ parameters.displayName }} + env: ${{ parameters.env }} + condition: ${{ parameters.condition }} + +- ${{ if and(ne(parameters.elseScript, ''), ne(parameters.parameter1, parameters.parameter2)) }}: + - script: ${{ parameters.elseScript }} + name: ${{ parameters.name }} + displayName: ${{ parameters.displayName }} + env: ${{ parameters.env }} + condition: ${{ parameters.condition }} \ No newline at end of file diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml new file mode 100644 index 0000000..3eb7e2d --- /dev/null +++ b/eng/common/templates/steps/send-to-helix.yml @@ -0,0 +1,91 @@ +# Please remember to update the documentation if you make changes to these parameters! 
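+#
+# Illustrative caller sketch (hedged: the queue name, payload directory, work item command and creator
+# below are hypothetical placeholders; only the template path and the parameter names come from this file):
+#
+#   steps:
+#   - template: /eng/common/templates/steps/send-to-helix.yml
+#     parameters:
+#       HelixSource: 'pr/dotnet/opennetty/$(Build.SourceBranch)'
+#       HelixType: 'tests/default/'
+#       HelixTargetQueues: 'Windows.10.Amd64.Open'
+#       WorkItemDirectory: '$(Build.SourcesDirectory)/artifacts/helix'
+#       WorkItemCommand: 'dotnet test OpenNetty.Tests.dll'
+#       Creator: 'opennetty'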
+parameters: + HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/ + HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/' + HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number + HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues + HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group + HelixConfiguration: '' # optional -- additional property attached to a job + HelixPreCommands: '' # optional -- commands to run before Helix work item execution + HelixPostCommands: '' # optional -- commands to run after Helix work item execution + WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects + WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects + WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects + CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload + XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true + XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects + XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects + XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner + XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects + IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion + DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json + DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json + WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget." 
+ IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set + HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net ) + Creator: '' # optional -- if the build is external, use this to specify who is sending the job + DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO + condition: succeeded() # optional -- condition for step to execute; defaults to succeeded() + continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false + +steps: + - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"' + displayName: ${{ parameters.DisplayNamePrefix }} (Windows) + env: + BuildConfig: $(_BuildConfig) + HelixSource: ${{ parameters.HelixSource }} + HelixType: ${{ parameters.HelixType }} + HelixBuild: ${{ parameters.HelixBuild }} + HelixConfiguration: ${{ parameters.HelixConfiguration }} + HelixTargetQueues: ${{ parameters.HelixTargetQueues }} + HelixAccessToken: ${{ parameters.HelixAccessToken }} + HelixPreCommands: ${{ parameters.HelixPreCommands }} + HelixPostCommands: ${{ parameters.HelixPostCommands }} + WorkItemDirectory: ${{ parameters.WorkItemDirectory }} + WorkItemCommand: ${{ parameters.WorkItemCommand }} + WorkItemTimeout: ${{ parameters.WorkItemTimeout }} + CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} + XUnitProjects: ${{ parameters.XUnitProjects }} + XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} + XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} + XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} + XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} + IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} + DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} + DotNetCliVersion: ${{ parameters.DotNetCliVersion }} + WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} + HelixBaseUri: ${{ parameters.HelixBaseUri }} + Creator: ${{ parameters.Creator }} + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} + - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog + displayName: ${{ parameters.DisplayNamePrefix }} (Unix) + env: + BuildConfig: $(_BuildConfig) + HelixSource: ${{ parameters.HelixSource }} + HelixType: ${{ parameters.HelixType }} + HelixBuild: ${{ parameters.HelixBuild }} + HelixConfiguration: ${{ parameters.HelixConfiguration }} + HelixTargetQueues: ${{ parameters.HelixTargetQueues }} + HelixAccessToken: ${{ parameters.HelixAccessToken }} + HelixPreCommands: ${{ parameters.HelixPreCommands }} + HelixPostCommands: ${{ parameters.HelixPostCommands }} + WorkItemDirectory: ${{ parameters.WorkItemDirectory }} + WorkItemCommand: ${{ parameters.WorkItemCommand }} + WorkItemTimeout: ${{ parameters.WorkItemTimeout }} + CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} + XUnitProjects: ${{ 
parameters.XUnitProjects }} + XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} + XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} + XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} + XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} + IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} + DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} + DotNetCliVersion: ${{ parameters.DotNetCliVersion }} + WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} + HelixBaseUri: ${{ parameters.HelixBaseUri }} + Creator: ${{ parameters.Creator }} + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} diff --git a/eng/common/templates/steps/source-build.yml b/eng/common/templates/steps/source-build.yml new file mode 100644 index 0000000..41bbb91 --- /dev/null +++ b/eng/common/templates/steps/source-build.yml @@ -0,0 +1,129 @@ +parameters: + # This template adds arcade-powered source-build to CI. + + # This is a 'steps' template, and is intended for advanced scenarios where the existing build + # infra has a careful build methodology that must be followed. For example, a repo + # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline + # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to + # GitHub. Using this steps template leaves room for that infra to be included. + + # Defines the platform on which to run the steps. See 'eng/common/templates/job/source-build.yml' + # for details. The entire object is described in the 'job' template for simplicity, even though + # the usage of the properties on this object is split between the 'job' and 'steps' templates. + platform: {} + +steps: +# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.) +- script: | + set -x + df -h + + # If building on the internal project, the artifact feeds variable may be available (usually only if needed) + # In that case, call the feed setup script to add internal feeds corresponding to public ones. + # In addition, add an msbuild argument to copy the WIP from the repo to the target build location. + # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those + # changes. + internalRestoreArgs= + if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then + # Temporarily work around https://github.com/dotnet/arcade/issues/7709 + chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh + $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw) + internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true' + + # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo. + # This only works if there is a username/email configured, which won't be the case in most CI runs. + git config --get user.email + if [ $? -ne 0 ]; then + git config user.email dn-bot@microsoft.com + git config user.name dn-bot + fi + fi + + # If building on the internal project, the internal storage variable may be available (usually only if needed) + # In that case, add variables to allow the download of internal runtimes if the specified versions are not found + # in the default public locations. 
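+  # (Note: the `'$(name)' != '$''(name)'` comparisons used in this script detect whether Azure
+  # Pipelines actually substituted the secret variable: when the variable is not defined, the
+  # literal `$(name)` text is left in place, and splitting the quoted string on the right-hand
+  # side keeps the script's own text from being substituted as well.)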
+ internalRuntimeDownloadArgs= + if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then + internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)' + fi + + buildConfig=Release + # Check if AzDO substitutes in a build config from a variable, and use it if so. + if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then + buildConfig='$(_BuildConfig)' + fi + + officialBuildArgs= + if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then + officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)' + fi + + targetRidArgs= + if [ '${{ parameters.platform.targetRID }}' != '' ]; then + targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}' + fi + + runtimeOsArgs= + if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then + runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}' + fi + + baseOsArgs= + if [ '${{ parameters.platform.baseOS }}' != '' ]; then + baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}' + fi + + publishArgs= + if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then + publishArgs='--publish' + fi + + assetManifestFileName=SourceBuild_RidSpecific.xml + if [ '${{ parameters.platform.name }}' != '' ]; then + assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml + fi + + ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ + --configuration $buildConfig \ + --restore --build --pack $publishArgs -bl \ + $officialBuildArgs \ + $internalRuntimeDownloadArgs \ + $internalRestoreArgs \ + $targetRidArgs \ + $runtimeOsArgs \ + $baseOsArgs \ + /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \ + /p:ArcadeBuildFromSource=true \ + /p:AssetManifestFileName=$assetManifestFileName + displayName: Build + +# Upload build logs for diagnosis. +- task: CopyFiles@2 + displayName: Prepare BuildLogs staging directory + inputs: + SourceFolder: '$(Build.SourcesDirectory)' + Contents: | + **/*.log + **/*.binlog + artifacts/source-build/self/prebuilt-report/** + TargetFolder: '$(Build.StagingDirectory)/BuildLogs' + CleanTargetFolder: true + continueOnError: true + condition: succeededOrFailed() + +- task: PublishPipelineArtifact@1 + displayName: Publish BuildLogs + inputs: + targetPath: '$(Build.StagingDirectory)/BuildLogs' + artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt) + continueOnError: true + condition: succeededOrFailed() + +# Manually inject component detection so that we can ignore the source build upstream cache, which contains +# a nupkg cache of input packages (a local feed). 
+# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir' +# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets +- task: ComponentGovernanceComponentDetection@0 + displayName: Component Detection (Exclude upstream cache) + inputs: + ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/source-build/self/src/artifacts/obj/source-built-upstream-cache' diff --git a/eng/common/templates/steps/telemetry-end.yml b/eng/common/templates/steps/telemetry-end.yml new file mode 100644 index 0000000..fadc04c --- /dev/null +++ b/eng/common/templates/steps/telemetry-end.yml @@ -0,0 +1,102 @@ +parameters: + maxRetries: 5 + retryDelay: 10 # in seconds + +steps: +- bash: | + if [ "$AGENT_JOBSTATUS" = "Succeeded" ] || [ "$AGENT_JOBSTATUS" = "PartiallySucceeded" ]; then + errorCount=0 + else + errorCount=1 + fi + warningCount=0 + + curlStatus=1 + retryCount=0 + # retry loop to harden against spotty telemetry connections + # we don't retry successes and 4xx client errors + until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]] + do + if [ $retryCount -gt 0 ]; then + echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..." + sleep $RetryDelay + fi + + # create a temporary file for curl output + res=`mktemp` + + curlResult=` + curl --verbose --output $res --write-out "%{http_code}"\ + -H 'Content-Type: application/json' \ + -H "X-Helix-Job-Token: $Helix_JobToken" \ + -H 'Content-Length: 0' \ + -X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$Helix_WorkItemId/finish" \ + --data-urlencode "errorCount=$errorCount" \ + --data-urlencode "warningCount=$warningCount"` + curlStatus=$? + + if [ $curlStatus -eq 0 ]; then + if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then + curlStatus=$curlResult + fi + fi + + let retryCount++ + done + + if [ $curlStatus -ne 0 ]; then + echo "Failed to Send Build Finish information after $retryCount retries" + vstsLogOutput="vso[task.logissue type=error;sourcepath=templates/steps/telemetry-end.yml;code=1;]Failed to Send Build Finish information: $curlStatus" + echo "##$vstsLogOutput" + exit 1 + fi + displayName: Send Unix Build End Telemetry + env: + # defined via VSTS variables in start-job.sh + Helix_JobToken: $(Helix_JobToken) + Helix_WorkItemId: $(Helix_WorkItemId) + MaxRetries: ${{ parameters.maxRetries }} + RetryDelay: ${{ parameters.retryDelay }} + condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT')) +- powershell: | + if (($env:Agent_JobStatus -eq 'Succeeded') -or ($env:Agent_JobStatus -eq 'PartiallySucceeded')) { + $ErrorCount = 0 + } else { + $ErrorCount = 1 + } + $WarningCount = 0 + + # Basic retry loop to harden against server flakiness + $retryCount = 0 + while ($retryCount -lt $env:MaxRetries) { + try { + Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$env:Helix_WorkItemId/finish?errorCount=$ErrorCount&warningCount=$WarningCount" -Method Post -ContentType "application/json" -Body "" ` + -Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken } + break + } + catch { + $statusCode = $_.Exception.Response.StatusCode.value__ + if ($statusCode -ge 400 -and $statusCode -le 499) { + Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)" + Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message + exit 1 + } + Write-Host "Failed to send 
telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..." + $retryCount++ + sleep $env:RetryDelay + continue + } + } + + if ($retryCount -ge $env:MaxRetries) { + Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries." + exit 1 + } + displayName: Send Windows Build End Telemetry + env: + # defined via VSTS variables in start-job.ps1 + Helix_JobToken: $(Helix_JobToken) + Helix_WorkItemId: $(Helix_WorkItemId) + MaxRetries: ${{ parameters.maxRetries }} + RetryDelay: ${{ parameters.retryDelay }} + condition: and(always(),eq(variables['Agent.Os'], 'Windows_NT')) diff --git a/eng/common/templates/steps/telemetry-start.yml b/eng/common/templates/steps/telemetry-start.yml new file mode 100644 index 0000000..32c01ef --- /dev/null +++ b/eng/common/templates/steps/telemetry-start.yml @@ -0,0 +1,241 @@ +parameters: + helixSource: 'undefined_defaulted_in_telemetry.yml' + helixType: 'undefined_defaulted_in_telemetry.yml' + buildConfig: '' + runAsPublic: false + maxRetries: 5 + retryDelay: 10 # in seconds + +steps: +- ${{ if and(eq(parameters.runAsPublic, 'false'), not(eq(variables['System.TeamProject'], 'public'))) }}: + - task: AzureKeyVault@1 + inputs: + azureSubscription: 'HelixProd_KeyVault' + KeyVaultName: HelixProdKV + SecretsFilter: 'HelixApiAccessToken' + condition: always() +- bash: | + # create a temporary file + jobInfo=`mktemp` + + # write job info content to temporary file + cat > $jobInfo < powershell invocations +# as dot sourcing isn't possible. +function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) { + if (Test-Path variable:global:_DotNetInstallDir) { + return $global:_DotNetInstallDir + } + + # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism + $env:DOTNET_MULTILEVEL_LOOKUP=0 + + # Disable first run since we do not need all ASP.NET packages restored. + $env:DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1 + + # Disable telemetry on CI. + if ($ci) { + $env:DOTNET_CLI_TELEMETRY_OPTOUT=1 + } + + # Source Build uses DotNetCoreSdkDir variable + if ($env:DotNetCoreSdkDir -ne $null) { + $env:DOTNET_INSTALL_DIR = $env:DotNetCoreSdkDir + } + + # Find the first path on %PATH% that contains the dotnet.exe + if ($useInstalledDotNetCli -and (-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -eq $null)) { + $dotnetExecutable = GetExecutableFileName 'dotnet' + $dotnetCmd = Get-Command $dotnetExecutable -ErrorAction SilentlyContinue + + if ($dotnetCmd -ne $null) { + $env:DOTNET_INSTALL_DIR = Split-Path $dotnetCmd.Path -Parent + } + } + + $dotnetSdkVersion = $GlobalJson.tools.dotnet + + # Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version, + # otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues. 
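+  # (For example, with a dotnet SDK version of 8.0.100 in global.json -- a hypothetical value --
+  # this first checks "$env:DOTNET_INSTALL_DIR\sdk\8.0.100" and otherwise installs the SDK under
+  # the repo-local "<repo root>\.dotnet" directory.)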
+ if ((-not $globalJsonHasRuntimes) -and (-not [string]::IsNullOrEmpty($env:DOTNET_INSTALL_DIR)) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$dotnetSdkVersion"))) { + $dotnetRoot = $env:DOTNET_INSTALL_DIR + } else { + $dotnetRoot = Join-Path $RepoRoot '.dotnet' + + if (-not (Test-Path(Join-Path $dotnetRoot "sdk\$dotnetSdkVersion"))) { + if ($install) { + InstallDotNetSdk $dotnetRoot $dotnetSdkVersion + } else { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to find dotnet with SDK version '$dotnetSdkVersion'" + ExitWithExitCode 1 + } + } + + $env:DOTNET_INSTALL_DIR = $dotnetRoot + } + + # Creates a temporary file under the toolset dir. + # The following code block is protecting against concurrent access so that this function can + # be called in parallel. + if ($createSdkLocationFile) { + do { + $sdkCacheFileTemp = Join-Path $ToolsetDir $([System.IO.Path]::GetRandomFileName()) + } + until (!(Test-Path $sdkCacheFileTemp)) + Set-Content -Path $sdkCacheFileTemp -Value $dotnetRoot + + try { + Move-Item -Force $sdkCacheFileTemp (Join-Path $ToolsetDir 'sdk.txt') + } catch { + # Somebody beat us + Remove-Item -Path $sdkCacheFileTemp + } + } + + # Add dotnet to PATH. This prevents any bare invocation of dotnet in custom + # build steps from using anything other than what we've downloaded. + # It also ensures that VS msbuild will use the downloaded sdk targets. + $env:PATH = "$dotnetRoot;$env:PATH" + + # Make Sure that our bootstrapped dotnet cli is available in future steps of the Azure Pipelines build + Write-PipelinePrependPath -Path $dotnetRoot + + Write-PipelineSetVariable -Name 'DOTNET_MULTILEVEL_LOOKUP' -Value '0' + Write-PipelineSetVariable -Name 'DOTNET_SKIP_FIRST_TIME_EXPERIENCE' -Value '1' + + return $global:_DotNetInstallDir = $dotnetRoot +} + +function Retry($downloadBlock, $maxRetries = 5) { + $retries = 1 + + while($true) { + try { + & $downloadBlock + break + } + catch { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_ + } + + if (++$retries -le $maxRetries) { + $delayInSeconds = [math]::Pow(2, $retries) - 1 # Exponential backoff + Write-Host "Retrying. Waiting for $delayInSeconds seconds before next attempt ($retries of $maxRetries)." + Start-Sleep -Seconds $delayInSeconds + } + else { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to download file in $maxRetries attempts." 
+ break + } + + } +} + +function GetDotNetInstallScript([string] $dotnetRoot) { + $installScript = Join-Path $dotnetRoot 'dotnet-install.ps1' + if (!(Test-Path $installScript)) { + Create-Directory $dotnetRoot + $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit + $uri = "https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1" + + Retry({ + Write-Host "GET $uri" + Invoke-WebRequest $uri -OutFile $installScript + }) + } + + return $installScript +} + +function InstallDotNetSdk([string] $dotnetRoot, [string] $version, [string] $architecture = '', [switch] $noPath) { + InstallDotNet $dotnetRoot $version $architecture '' $false $runtimeSourceFeed $runtimeSourceFeedKey -noPath:$noPath +} + +function InstallDotNet([string] $dotnetRoot, + [string] $version, + [string] $architecture = '', + [string] $runtime = '', + [bool] $skipNonVersionedFiles = $false, + [string] $runtimeSourceFeed = '', + [string] $runtimeSourceFeedKey = '', + [switch] $noPath) { + + $dotnetVersionLabel = "'sdk v$version'" + + if ($runtime -ne '' -and $runtime -ne 'sdk') { + $runtimePath = $dotnetRoot + $runtimePath = $runtimePath + "\shared" + if ($runtime -eq "dotnet") { $runtimePath = $runtimePath + "\Microsoft.NETCore.App" } + if ($runtime -eq "aspnetcore") { $runtimePath = $runtimePath + "\Microsoft.AspNetCore.App" } + if ($runtime -eq "windowsdesktop") { $runtimePath = $runtimePath + "\Microsoft.WindowsDesktop.App" } + $runtimePath = $runtimePath + "\" + $version + + $dotnetVersionLabel = "runtime toolset '$runtime/$architecture v$version'" + + if (Test-Path $runtimePath) { + Write-Host " Runtime toolset '$runtime/$architecture v$version' already installed." + $installSuccess = $true + Exit + } + } + + $installScript = GetDotNetInstallScript $dotnetRoot + $installParameters = @{ + Version = $version + InstallDir = $dotnetRoot + } + + if ($architecture) { $installParameters.Architecture = $architecture } + if ($runtime) { $installParameters.Runtime = $runtime } + if ($skipNonVersionedFiles) { $installParameters.SkipNonVersionedFiles = $skipNonVersionedFiles } + if ($noPath) { $installParameters.NoPath = $True } + + $variations = @() + $variations += @($installParameters) + + $dotnetBuilds = $installParameters.Clone() + $dotnetbuilds.AzureFeed = "https://dotnetbuilds.azureedge.net/public" + $variations += @($dotnetBuilds) + + if ($runtimeSourceFeed) { + $runtimeSource = $installParameters.Clone() + $runtimeSource.AzureFeed = $runtimeSourceFeed + if ($runtimeSourceFeedKey) { + $decodedBytes = [System.Convert]::FromBase64String($runtimeSourceFeedKey) + $decodedString = [System.Text.Encoding]::UTF8.GetString($decodedBytes) + $runtimeSource.FeedCredential = $decodedString + } + $variations += @($runtimeSource) + } + + $installSuccess = $false + foreach ($variation in $variations) { + if ($variation | Get-Member AzureFeed) { + $location = $variation.AzureFeed + } else { + $location = "public location"; + } + Write-Host " Attempting to install $dotnetVersionLabel from $location." + try { + & $installScript @variation + $installSuccess = $true + break + } + catch { + Write-Host " Failed to install $dotnetVersionLabel from $location." + } + } + if (-not $installSuccess) { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Failed to install $dotnetVersionLabel from any of the specified locations." + ExitWithExitCode 1 + } +} + +# +# Locates Visual Studio MSBuild installation. 
+# The preference order for MSBuild to use is as follows: +# +# 1. MSBuild from an active VS command prompt +# 2. MSBuild from a compatible VS installation +# 3. MSBuild from the xcopy tool package +# +# Returns full path to msbuild.exe. +# Throws on failure. +# +function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = $null) { + if (-not (IsWindowsPlatform)) { + throw "Cannot initialize Visual Studio on non-Windows" + } + + if (Test-Path variable:global:_MSBuildExe) { + return $global:_MSBuildExe + } + + # Minimum VS version to require. + $vsMinVersionReqdStr = '17.7' + $vsMinVersionReqd = [Version]::new($vsMinVersionReqdStr) + + # If the version of msbuild is going to be xcopied, + # use this version. Version matches a package here: + # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/RoslynTools.MSBuild/versions/17.8.1-2 + $defaultXCopyMSBuildVersion = '17.8.1-2' + + if (!$vsRequirements) { + if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') { + $vsRequirements = $GlobalJson.tools.vs + } + else { + $vsRequirements = New-Object PSObject -Property @{ version = $vsMinVersionReqdStr } + } + } + $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { $vsMinVersionReqdStr } + $vsMinVersion = [Version]::new($vsMinVersionStr) + + # Try msbuild command available in the environment. + if ($env:VSINSTALLDIR -ne $null) { + $msbuildCmd = Get-Command 'msbuild.exe' -ErrorAction SilentlyContinue + if ($msbuildCmd -ne $null) { + # Workaround for https://github.com/dotnet/roslyn/issues/35793 + # Due to this issue $msbuildCmd.Version returns 0.0.0.0 for msbuild.exe 16.2+ + $msbuildVersion = [Version]::new((Get-Item $msbuildCmd.Path).VersionInfo.ProductVersion.Split([char[]]@('-', '+'))[0]) + + if ($msbuildVersion -ge $vsMinVersion) { + return $global:_MSBuildExe = $msbuildCmd.Path + } + + # Report error - the developer environment is initialized with incompatible VS version. + throw "Developer Command Prompt for VS $($env:VisualStudioVersion) is not recent enough. Please upgrade to $vsMinVersionStr or build from a plain CMD window" + } + } + + # Locate Visual Studio installation or download x-copy msbuild. + $vsInfo = LocateVisualStudio $vsRequirements + if ($vsInfo -ne $null) { + # Ensure vsInstallDir has a trailing slash + $vsInstallDir = Join-Path $vsInfo.installationPath "\" + $vsMajorVersion = $vsInfo.installationVersion.Split('.')[0] + + InitializeVisualStudioEnvironmentVariables $vsInstallDir $vsMajorVersion + } else { + + if (Get-Member -InputObject $GlobalJson.tools -Name 'xcopy-msbuild') { + $xcopyMSBuildVersion = $GlobalJson.tools.'xcopy-msbuild' + $vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0] + } else { + #if vs version provided in global.json is incompatible (too low) then use the default version for xcopy msbuild download + if($vsMinVersion -lt $vsMinVersionReqd){ + Write-Host "Using xcopy-msbuild version of $defaultXCopyMSBuildVersion since VS version $vsMinVersionStr provided in global.json is not compatible" + $xcopyMSBuildVersion = $defaultXCopyMSBuildVersion + $vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0] + } + else{ + # If the VS version IS compatible, look for an xcopy msbuild package + # with a version matching VS. + # Note: If this version does not exist, then an explicit version of xcopy msbuild + # can be specified in global.json. This will be required for pre-release versions of msbuild. 
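+ # For reference, an explicit pin in global.json would look like this (illustrative fragment; the
+ # version shown matches $defaultXCopyMSBuildVersion above and is not necessarily what this repository uses):
+ #
+ #   "tools": {
+ #     "vs": { "version": "17.8" },
+ #     "xcopy-msbuild": "17.8.1-2"
+ #   }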
+ $vsMajorVersion = $vsMinVersion.Major + $vsMinorVersion = $vsMinVersion.Minor + $xcopyMSBuildVersion = "$vsMajorVersion.$vsMinorVersion.0" + } + } + + $vsInstallDir = $null + if ($xcopyMSBuildVersion.Trim() -ine "none") { + $vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install + if ($vsInstallDir -eq $null) { + throw "Could not xcopy msbuild. Please check that package 'RoslynTools.MSBuild @ $xcopyMSBuildVersion' exists on feed 'dotnet-eng'." + } + } + if ($vsInstallDir -eq $null) { + throw 'Unable to find Visual Studio that has required version and components installed' + } + } + + $msbuildVersionDir = if ([int]$vsMajorVersion -lt 16) { "$vsMajorVersion.0" } else { "Current" } + + $local:BinFolder = Join-Path $vsInstallDir "MSBuild\$msbuildVersionDir\Bin" + $local:Prefer64bit = if (Get-Member -InputObject $vsRequirements -Name 'Prefer64bit') { $vsRequirements.Prefer64bit } else { $false } + if ($local:Prefer64bit -and (Test-Path(Join-Path $local:BinFolder "amd64"))) { + $global:_MSBuildExe = Join-Path $local:BinFolder "amd64\msbuild.exe" + } else { + $global:_MSBuildExe = Join-Path $local:BinFolder "msbuild.exe" + } + + return $global:_MSBuildExe +} + +function InitializeVisualStudioEnvironmentVariables([string] $vsInstallDir, [string] $vsMajorVersion) { + $env:VSINSTALLDIR = $vsInstallDir + Set-Item "env:VS$($vsMajorVersion)0COMNTOOLS" (Join-Path $vsInstallDir "Common7\Tools\") + + $vsSdkInstallDir = Join-Path $vsInstallDir "VSSDK\" + if (Test-Path $vsSdkInstallDir) { + Set-Item "env:VSSDK$($vsMajorVersion)0Install" $vsSdkInstallDir + $env:VSSDKInstall = $vsSdkInstallDir + } +} + +function InstallXCopyMSBuild([string]$packageVersion) { + return InitializeXCopyMSBuild $packageVersion -install $true +} + +function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) { + $packageName = 'RoslynTools.MSBuild' + $packageDir = Join-Path $ToolsDir "msbuild\$packageVersion" + $packagePath = Join-Path $packageDir "$packageName.$packageVersion.nupkg" + + if (!(Test-Path $packageDir)) { + if (!$install) { + return $null + } + + Create-Directory $packageDir + + Write-Host "Downloading $packageName $packageVersion" + $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit + Retry({ + Invoke-WebRequest "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/flat2/$packageName/$packageVersion/$packageName.$packageVersion.nupkg" -OutFile $packagePath + }) + + Unzip $packagePath $packageDir + } + + return Join-Path $packageDir 'tools' +} + +# +# Locates Visual Studio instance that meets the minimal requirements specified by tools.vs object in global.json. +# +# The following properties of tools.vs are recognized: +# "version": "{major}.{minor}" +# Two part minimal VS version, e.g. "15.9", "16.0", etc. +# "components": ["componentId1", "componentId2", ...] +# Array of ids of workload components that must be available in the VS instance. +# See e.g. https://docs.microsoft.com/en-us/visualstudio/install/workload-component-id-vs-enterprise?view=vs-2017 +# +# Returns JSON describing the located VS instance (same format as returned by vswhere), +# or $null if no instance meeting the requirements is found on the machine. +# +function LocateVisualStudio([object]$vsRequirements = $null){ + if (-not (IsWindowsPlatform)) { + throw "Cannot run vswhere on non-Windows platforms." 
+ } + + if (Get-Member -InputObject $GlobalJson.tools -Name 'vswhere') { + $vswhereVersion = $GlobalJson.tools.vswhere + } else { + $vswhereVersion = '2.5.2' + } + + $vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion" + $vsWhereExe = Join-Path $vsWhereDir 'vswhere.exe' + + if (!(Test-Path $vsWhereExe)) { + Create-Directory $vsWhereDir + Write-Host 'Downloading vswhere' + Retry({ + Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe + }) + } + + if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs } + $args = @('-latest', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*') + + if (!$excludePrereleaseVS) { + $args += '-prerelease' + } + + if (Get-Member -InputObject $vsRequirements -Name 'version') { + $args += '-version' + $args += $vsRequirements.version + } + + if (Get-Member -InputObject $vsRequirements -Name 'components') { + foreach ($component in $vsRequirements.components) { + $args += '-requires' + $args += $component + } + } + + $vsInfo =& $vsWhereExe $args | ConvertFrom-Json + + if ($lastExitCode -ne 0) { + return $null + } + + # use first matching instance + return $vsInfo[0] +} + +function InitializeBuildTool() { + if (Test-Path variable:global:_BuildTool) { + # If the requested msbuild parameters do not match, clear the cached variables. + if($global:_BuildTool.Contains('ExcludePrereleaseVS') -and $global:_BuildTool.ExcludePrereleaseVS -ne $excludePrereleaseVS) { + Remove-Item variable:global:_BuildTool + Remove-Item variable:global:_MSBuildExe + } else { + return $global:_BuildTool + } + } + + if (-not $msbuildEngine) { + $msbuildEngine = GetDefaultMSBuildEngine + } + + # Initialize dotnet cli if listed in 'tools' + $dotnetRoot = $null + if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') { + $dotnetRoot = InitializeDotNetCli -install:$restore + } + + if ($msbuildEngine -eq 'dotnet') { + if (!$dotnetRoot) { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "/global.json must specify 'tools.dotnet'." + ExitWithExitCode 1 + } + $dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet') + $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'net8.0' } + } elseif ($msbuildEngine -eq "vs") { + try { + $msbuildPath = InitializeVisualStudioMSBuild -install:$restore + } catch { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_ + ExitWithExitCode 1 + } + + $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472"; ExcludePrereleaseVS = $excludePrereleaseVS } + } else { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'." + ExitWithExitCode 1 + } + + return $global:_BuildTool = $buildTool +} + +function GetDefaultMSBuildEngine() { + # Presence of tools.vs indicates the repo needs to build using VS msbuild on Windows. + if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') { + return 'vs' + } + + if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') { + return 'dotnet' + } + + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vs'." + ExitWithExitCode 1 +} + +function GetNuGetPackageCachePath() { + if ($env:NUGET_PACKAGES -eq $null) { + # Use local cache on CI to ensure deterministic build. 
+ # Avoid using the http cache as workaround for https://github.com/NuGet/Home/issues/3116 + # use global cache in dev builds to avoid cost of downloading packages. + # For directory normalization, see also: https://github.com/NuGet/Home/issues/7968 + if ($useGlobalNuGetCache) { + $env:NUGET_PACKAGES = Join-Path $env:UserProfile '.nuget\packages\' + } else { + $env:NUGET_PACKAGES = Join-Path $RepoRoot '.packages\' + $env:RESTORENOCACHE = $true + } + } + + return $env:NUGET_PACKAGES +} + +# Returns a full path to an Arcade SDK task project file. +function GetSdkTaskProject([string]$taskName) { + return Join-Path (Split-Path (InitializeToolset) -Parent) "SdkTasks\$taskName.proj" +} + +function InitializeNativeTools() { + if (-Not (Test-Path variable:DisableNativeToolsetInstalls) -And (Get-Member -InputObject $GlobalJson -Name "native-tools")) { + $nativeArgs= @{} + if ($ci) { + $nativeArgs = @{ + InstallDirectory = "$ToolsDir" + } + } + if ($env:NativeToolsOnMachine) { + Write-Host "Variable NativeToolsOnMachine detected, enabling native tool path promotion..." + $nativeArgs += @{ PathPromotion = $true } + } + & "$PSScriptRoot/init-tools-native.ps1" @nativeArgs + } +} + +function Read-ArcadeSdkVersion() { + return $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk' +} + +function InitializeToolset() { + if (Test-Path variable:global:_ToolsetBuildProj) { + return $global:_ToolsetBuildProj + } + + $nugetCache = GetNuGetPackageCachePath + + $toolsetVersion = Read-ArcadeSdkVersion + $toolsetLocationFile = Join-Path $ToolsetDir "$toolsetVersion.txt" + + if (Test-Path $toolsetLocationFile) { + $path = Get-Content $toolsetLocationFile -TotalCount 1 + if (Test-Path $path) { + return $global:_ToolsetBuildProj = $path + } + } + + if (-not $restore) { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Toolset version $toolsetVersion has not been restored." + ExitWithExitCode 1 + } + + $buildTool = InitializeBuildTool + + $proj = Join-Path $ToolsetDir 'restore.proj' + $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'ToolsetRestore.binlog') } else { '' } + + '' | Set-Content $proj + + MSBuild-Core $proj $bl /t:__WriteToolsetLocation /clp:ErrorsOnly`;NoSummary /p:__ToolsetLocationOutputFile=$toolsetLocationFile + + $path = Get-Content $toolsetLocationFile -Encoding UTF8 -TotalCount 1 + if (!(Test-Path $path)) { + throw "Invalid toolset path: $path" + } + + return $global:_ToolsetBuildProj = $path +} + +function ExitWithExitCode([int] $exitCode) { + if ($ci -and $prepareMachine) { + Stop-Processes + } + exit $exitCode +} + +# Check if $LASTEXITCODE is a nonzero exit code (NZEC). If so, print a Azure Pipeline error for +# diagnostics, then exit the script with the $LASTEXITCODE. +function Exit-IfNZEC([string] $category = "General") { + Write-Host "Exit code $LASTEXITCODE" + if ($LASTEXITCODE -ne 0) { + $message = "Last command failed with exit code $LASTEXITCODE." + Write-PipelineTelemetryError -Force -Category $category -Message $message + ExitWithExitCode $LASTEXITCODE + } +} + +function Stop-Processes() { + Write-Host 'Killing running build processes...' + foreach ($processName in $processesToStopOnExit) { + Get-Process -Name $processName -ErrorAction SilentlyContinue | Stop-Process + } +} + +# +# Executes msbuild (or 'dotnet msbuild') with arguments passed to the function. +# The arguments are automatically quoted. +# Terminates the script if the build fails. 
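+# Illustrative usage (not a literal call made by this repository's scripts; $projectFile is a placeholder,
+# while $configuration and $LogDir are defined at the bottom of this script):
+#
+#   MSBuild $projectFile /t:Build "/p:Configuration=$configuration" "/bl:$(Join-Path $LogDir 'Build.binlog')"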
+# +function MSBuild() { + if ($pipelinesLog) { + $buildTool = InitializeBuildTool + + if ($ci -and $buildTool.Tool -eq 'dotnet') { + $env:NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS = 20 + $env:NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS = 20 + Write-PipelineSetVariable -Name 'NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS' -Value '20' + Write-PipelineSetVariable -Name 'NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS' -Value '20' + } + + Enable-Nuget-EnhancedRetry + + $toolsetBuildProject = InitializeToolset + $basePath = Split-Path -parent $toolsetBuildProject + $possiblePaths = @( + # new scripts need to work with old packages, so we need to look for the old names/versions + (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll')), + (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')), + (Join-Path $basePath (Join-Path netcoreapp2.1 'Microsoft.DotNet.ArcadeLogging.dll')), + (Join-Path $basePath (Join-Path netcoreapp2.1 'Microsoft.DotNet.Arcade.Sdk.dll')) + (Join-Path $basePath (Join-Path netcoreapp3.1 'Microsoft.DotNet.ArcadeLogging.dll')), + (Join-Path $basePath (Join-Path netcoreapp3.1 'Microsoft.DotNet.Arcade.Sdk.dll')) + (Join-Path $basePath (Join-Path net7.0 'Microsoft.DotNet.ArcadeLogging.dll')), + (Join-Path $basePath (Join-Path net7.0 'Microsoft.DotNet.Arcade.Sdk.dll')) + ) + $selectedPath = $null + foreach ($path in $possiblePaths) { + if (Test-Path $path -PathType Leaf) { + $selectedPath = $path + break + } + } + if (-not $selectedPath) { + Write-PipelineTelemetryError -Category 'Build' -Message 'Unable to find arcade sdk logger assembly.' + ExitWithExitCode 1 + } + $args += "/logger:$selectedPath" + } + + MSBuild-Core @args +} + +# +# Executes msbuild (or 'dotnet msbuild') with arguments passed to the function. +# The arguments are automatically quoted. +# Terminates the script if the build fails. +# +function MSBuild-Core() { + if ($ci) { + if (!$binaryLog -and !$excludeCIBinarylog) { + Write-PipelineTelemetryError -Category 'Build' -Message 'Binary log must be enabled in CI build, or explicitly opted-out from with the -excludeCIBinarylog switch.' + ExitWithExitCode 1 + } + + if ($nodeReuse) { + Write-PipelineTelemetryError -Category 'Build' -Message 'Node reuse must be disabled in CI build.' + ExitWithExitCode 1 + } + } + + Enable-Nuget-EnhancedRetry + + $buildTool = InitializeBuildTool + + $cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci" + + if ($warnAsError) { + $cmdArgs += ' /warnaserror /p:TreatWarningsAsErrors=true' + } + else { + $cmdArgs += ' /p:TreatWarningsAsErrors=false' + } + + foreach ($arg in $args) { + if ($null -ne $arg -and $arg.Trim() -ne "") { + if ($arg.EndsWith('\')) { + $arg = $arg + "\" + } + $cmdArgs += " `"$arg`"" + } + } + + $env:ARCADE_BUILD_TOOL_COMMAND = "$($buildTool.Path) $cmdArgs" + + $exitCode = Exec-Process $buildTool.Path $cmdArgs + + if ($exitCode -ne 0) { + # We should not Write-PipelineTaskError here because that message shows up in the build summary + # The build already logged an error, that's the reason it failed. Producing an error here only adds noise. + Write-Host "Build failed with exit code $exitCode. Check errors above." -ForegroundColor Red + + $buildLog = GetMSBuildBinaryLogCommandLineArgument $args + if ($null -ne $buildLog) { + Write-Host "See log: $buildLog" -ForegroundColor DarkGray + } + + # When running on Azure Pipelines, override the returned exit code to avoid double logging. 
+ if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null) { + Write-PipelineSetResult -Result "Failed" -Message "msbuild execution failed." + # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error + # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error + ExitWithExitCode 0 + } else { + ExitWithExitCode $exitCode + } + } +} + +function GetMSBuildBinaryLogCommandLineArgument($arguments) { + foreach ($argument in $arguments) { + if ($argument -ne $null) { + $arg = $argument.Trim() + if ($arg.StartsWith('/bl:', "OrdinalIgnoreCase")) { + return $arg.Substring('/bl:'.Length) + } + + if ($arg.StartsWith('/binaryLogger:', 'OrdinalIgnoreCase')) { + return $arg.Substring('/binaryLogger:'.Length) + } + } + } + + return $null +} + +function GetExecutableFileName($baseName) { + if (IsWindowsPlatform) { + return "$baseName.exe" + } + else { + return $baseName + } +} + +function IsWindowsPlatform() { + return [environment]::OSVersion.Platform -eq [PlatformID]::Win32NT +} + +function Get-Darc($version) { + $darcPath = "$TempDir\darc\$(New-Guid)" + if ($version -ne $null) { + & $PSScriptRoot\darc-init.ps1 -toolpath $darcPath -darcVersion $version | Out-Host + } else { + & $PSScriptRoot\darc-init.ps1 -toolpath $darcPath | Out-Host + } + return "$darcPath\darc.exe" +} + +. $PSScriptRoot\pipeline-logging-functions.ps1 + +$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot '..\..\') +$EngRoot = Resolve-Path (Join-Path $PSScriptRoot '..') +$ArtifactsDir = Join-Path $RepoRoot 'artifacts' +$ToolsetDir = Join-Path $ArtifactsDir 'toolset' +$ToolsDir = Join-Path $RepoRoot '.tools' +$LogDir = Join-Path (Join-Path $ArtifactsDir 'log') $configuration +$TempDir = Join-Path (Join-Path $ArtifactsDir 'tmp') $configuration +$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot 'global.json') | ConvertFrom-Json +# true if global.json contains a "runtimes" section +$globalJsonHasRuntimes = if ($GlobalJson.tools.PSObject.Properties.Name -Match 'runtimes') { $true } else { $false } + +Create-Directory $ToolsetDir +Create-Directory $TempDir +Create-Directory $LogDir + +Write-PipelineSetVariable -Name 'Artifacts' -Value $ArtifactsDir +Write-PipelineSetVariable -Name 'Artifacts.Toolset' -Value $ToolsetDir +Write-PipelineSetVariable -Name 'Artifacts.Log' -Value $LogDir +Write-PipelineSetVariable -Name 'TEMP' -Value $TempDir +Write-PipelineSetVariable -Name 'TMP' -Value $TempDir + +# Import custom tools configuration, if present in the repo. +# Note: Import in global scope so that the script set top-level variables without qualification. +if (!$disableConfigureToolsetImport) { + $configureToolsetScript = Join-Path $EngRoot 'configure-toolset.ps1' + if (Test-Path $configureToolsetScript) { + . $configureToolsetScript + if ((Test-Path variable:failOnConfigureToolsetError) -And $failOnConfigureToolsetError) { + if ((Test-Path variable:LastExitCode) -And ($LastExitCode -ne 0)) { + Write-PipelineTelemetryError -Category 'Build' -Message 'configure-toolset.ps1 returned a non-zero exit code' + ExitWithExitCode $LastExitCode + } + } + } +} + +# +# If $ci flag is set, turn on (and log that we did) special environment variables for improved Nuget client retry logic. 
+# +function Enable-Nuget-EnhancedRetry() { + if ($ci) { + Write-Host "Setting NUGET enhanced retry environment variables" + $env:NUGET_ENABLE_ENHANCED_HTTP_RETRY = 'true' + $env:NUGET_ENHANCED_MAX_NETWORK_TRY_COUNT = 6 + $env:NUGET_ENHANCED_NETWORK_RETRY_DELAY_MILLISECONDS = 1000 + $env:NUGET_RETRY_HTTP_429 = 'true' + Write-PipelineSetVariable -Name 'NUGET_ENABLE_ENHANCED_HTTP_RETRY' -Value 'true' + Write-PipelineSetVariable -Name 'NUGET_ENHANCED_MAX_NETWORK_TRY_COUNT' -Value '6' + Write-PipelineSetVariable -Name 'NUGET_ENHANCED_NETWORK_RETRY_DELAY_MILLISECONDS' -Value '1000' + Write-PipelineSetVariable -Name 'NUGET_RETRY_HTTP_429' -Value 'true' + } +} diff --git a/eng/common/tools.sh b/eng/common/tools.sh new file mode 100644 index 0000000..e8d4789 --- /dev/null +++ b/eng/common/tools.sh @@ -0,0 +1,587 @@ +#!/usr/bin/env bash + +# Initialize variables if they aren't already defined. + +# CI mode - set to true on CI server for PR validation build or official build. +ci=${ci:-false} + +# Set to true to use the pipelines logger which will enable Azure logging output. +# https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md +# This flag is meant as a temporary opt-opt for the feature while validate it across +# our consumers. It will be deleted in the future. +if [[ "$ci" == true ]]; then + pipelines_log=${pipelines_log:-true} +else + pipelines_log=${pipelines_log:-false} +fi + +# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names. +configuration=${configuration:-'Debug'} + +# Set to true to opt out of outputting binary log while running in CI +exclude_ci_binary_log=${exclude_ci_binary_log:-false} + +if [[ "$ci" == true && "$exclude_ci_binary_log" == false ]]; then + binary_log_default=true +else + binary_log_default=false +fi + +# Set to true to output binary log from msbuild. Note that emitting binary log slows down the build. +binary_log=${binary_log:-$binary_log_default} + +# Turns on machine preparation/clean up code that changes the machine state (e.g. kills build processes). +prepare_machine=${prepare_machine:-false} + +# True to restore toolsets and dependencies. +restore=${restore:-true} + +# Adjusts msbuild verbosity level. +verbosity=${verbosity:-'minimal'} + +# Set to true to reuse msbuild nodes. Recommended to not reuse on CI. +if [[ "$ci" == true ]]; then + node_reuse=${node_reuse:-false} +else + node_reuse=${node_reuse:-true} +fi + +# Configures warning treatment in msbuild. +warn_as_error=${warn_as_error:-true} + +# True to attempt using .NET Core already that meets requirements specified in global.json +# installed on the machine instead of downloading one. +use_installed_dotnet_cli=${use_installed_dotnet_cli:-true} + +# Enable repos to use a particular version of the on-line dotnet-install scripts. +# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.sh +dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'} + +# True to use global NuGet cache instead of restoring packages to repository-local directory. +if [[ "$ci" == true ]]; then + use_global_nuget_cache=${use_global_nuget_cache:-false} +else + use_global_nuget_cache=${use_global_nuget_cache:-true} +fi + +# Used when restoring .NET SDK from alternative feeds +runtime_source_feed=${runtime_source_feed:-''} +runtime_source_feed_key=${runtime_source_feed_key:-''} + +# Resolve any symlinks in the given path. 
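+# Usage example (this is how the function is invoked at the bottom of this script):
+#
+#   ResolvePath "${BASH_SOURCE[0]}"
+#   _script_dir=`dirname "$_ResolvePath"`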
+function ResolvePath { + local path=$1 + + while [[ -h $path ]]; do + local dir="$( cd -P "$( dirname "$path" )" && pwd )" + path="$(readlink "$path")" + + # if $path was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $path != /* ]] && path="$dir/$path" + done + + # return value + _ResolvePath="$path" +} + +# ReadVersionFromJson [json key] +function ReadGlobalVersion { + local key=$1 + + if command -v jq &> /dev/null; then + _ReadGlobalVersion="$(jq -r ".[] | select(has(\"$key\")) | .\"$key\"" "$global_json_file")" + elif [[ "$(cat "$global_json_file")" =~ \"$key\"[[:space:]\:]*\"([^\"]+) ]]; then + _ReadGlobalVersion=${BASH_REMATCH[1]} + fi + + if [[ -z "$_ReadGlobalVersion" ]]; then + Write-PipelineTelemetryError -category 'Build' "Error: Cannot find \"$key\" in $global_json_file" + ExitWithExitCode 1 + fi +} + +function InitializeDotNetCli { + if [[ -n "${_InitializeDotNetCli:-}" ]]; then + return + fi + + local install=$1 + + # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism + export DOTNET_MULTILEVEL_LOOKUP=0 + + # Disable first run since we want to control all package sources + export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1 + + # Disable telemetry on CI + if [[ $ci == true ]]; then + export DOTNET_CLI_TELEMETRY_OPTOUT=1 + fi + + # LTTNG is the logging infrastructure used by Core CLR. Need this variable set + # so it doesn't output warnings to the console. + export LTTNG_HOME="$HOME" + + # Source Build uses DotNetCoreSdkDir variable + if [[ -n "${DotNetCoreSdkDir:-}" ]]; then + export DOTNET_INSTALL_DIR="$DotNetCoreSdkDir" + fi + + # Find the first path on $PATH that contains the dotnet.exe + if [[ "$use_installed_dotnet_cli" == true && $global_json_has_runtimes == false && -z "${DOTNET_INSTALL_DIR:-}" ]]; then + local dotnet_path=`command -v dotnet` + if [[ -n "$dotnet_path" ]]; then + ResolvePath "$dotnet_path" + export DOTNET_INSTALL_DIR=`dirname "$_ResolvePath"` + fi + fi + + ReadGlobalVersion "dotnet" + local dotnet_sdk_version=$_ReadGlobalVersion + local dotnet_root="" + + # Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version, + # otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues. + if [[ $global_json_has_runtimes == false && -n "${DOTNET_INSTALL_DIR:-}" && -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then + dotnet_root="$DOTNET_INSTALL_DIR" + else + dotnet_root="$repo_root/.dotnet" + + export DOTNET_INSTALL_DIR="$dotnet_root" + + if [[ ! -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then + if [[ "$install" == true ]]; then + InstallDotNetSdk "$dotnet_root" "$dotnet_sdk_version" + else + Write-PipelineTelemetryError -category 'InitializeToolset' "Unable to find dotnet with SDK version '$dotnet_sdk_version'" + ExitWithExitCode 1 + fi + fi + fi + + # Add dotnet to PATH. This prevents any bare invocation of dotnet in custom + # build steps from using anything other than what we've downloaded. 
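+ # (Write-PipelinePrependPath is defined in pipeline-logging-functions.sh, which is sourced at the
+ # bottom of this script; on Azure Pipelines it is expected to surface the change through the
+ # ##vso[task.prependpath] logging command in addition to updating PATH for the current shell.)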
+ Write-PipelinePrependPath -path "$dotnet_root" + + Write-PipelineSetVariable -name "DOTNET_MULTILEVEL_LOOKUP" -value "0" + Write-PipelineSetVariable -name "DOTNET_SKIP_FIRST_TIME_EXPERIENCE" -value "1" + + # return value + _InitializeDotNetCli="$dotnet_root" +} + +function InstallDotNetSdk { + local root=$1 + local version=$2 + local architecture="unset" + if [[ $# -ge 3 ]]; then + architecture=$3 + fi + InstallDotNet "$root" "$version" $architecture 'sdk' 'true' $runtime_source_feed $runtime_source_feed_key +} + +function InstallDotNet { + local root=$1 + local version=$2 + local runtime=$4 + + local dotnetVersionLabel="'$runtime v$version'" + if [[ -n "${4:-}" ]] && [ "$4" != 'sdk' ]; then + runtimePath="$root" + runtimePath="$runtimePath/shared" + case "$runtime" in + dotnet) + runtimePath="$runtimePath/Microsoft.NETCore.App" + ;; + aspnetcore) + runtimePath="$runtimePath/Microsoft.AspNetCore.App" + ;; + windowsdesktop) + runtimePath="$runtimePath/Microsoft.WindowsDesktop.App" + ;; + *) + ;; + esac + runtimePath="$runtimePath/$version" + + dotnetVersionLabel="runtime toolset '$runtime/$architecture v$version'" + + if [ -d "$runtimePath" ]; then + echo " Runtime toolset '$runtime/$architecture v$version' already installed." + local installSuccess=1 + return + fi + fi + + GetDotNetInstallScript "$root" + local install_script=$_GetDotNetInstallScript + + local installParameters=(--version $version --install-dir "$root") + + if [[ -n "${3:-}" ]] && [ "$3" != 'unset' ]; then + installParameters+=(--architecture $3) + fi + if [[ -n "${4:-}" ]] && [ "$4" != 'sdk' ]; then + installParameters+=(--runtime $4) + fi + if [[ "$#" -ge "5" ]] && [[ "$5" != 'false' ]]; then + installParameters+=(--skip-non-versioned-files) + fi + + local variations=() # list of variable names with parameter arrays in them + + local public_location=("${installParameters[@]}") + variations+=(public_location) + + local dotnetbuilds=("${installParameters[@]}" --azure-feed "https://dotnetbuilds.azureedge.net/public") + variations+=(dotnetbuilds) + + if [[ -n "${6:-}" ]]; then + variations+=(private_feed) + local private_feed=("${installParameters[@]}" --azure-feed $6) + if [[ -n "${7:-}" ]]; then + # The 'base64' binary on alpine uses '-d' and doesn't support '--decode' + # '-d'. To work around this, do a simple detection and switch the parameter + # accordingly. + decodeArg="--decode" + if base64 --help 2>&1 | grep -q "BusyBox"; then + decodeArg="-d" + fi + decodedFeedKey=`echo $7 | base64 $decodeArg` + private_feed+=(--feed-credential $decodedFeedKey) + fi + fi + + local installSuccess=0 + for variationName in "${variations[@]}"; do + local name="$variationName[@]" + local variation=("${!name}") + echo " Attempting to install $dotnetVersionLabel from $variationName." + bash "$install_script" "${variation[@]}" && installSuccess=1 + if [[ "$installSuccess" -eq 1 ]]; then + break + fi + + echo " Failed to install $dotnetVersionLabel from $variationName." + done + + if [[ "$installSuccess" -eq 0 ]]; then + Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install $dotnetVersionLabel from any of the specified locations." + ExitWithExitCode 1 + fi +} + +function with_retries { + local maxRetries=5 + local retries=1 + echo "Trying to run '$@' for maximum of $maxRetries attempts." + while [[ $((retries++)) -le $maxRetries ]]; do + "$@" + + if [[ $? == 0 ]]; then + echo "Ran '$@' successfully." + return 0 + fi + + timeout=$((3**$retries-1)) + echo "Failed to execute '$@'. 
Waiting $timeout seconds before next attempt ($retries out of $maxRetries)." 1>&2 + sleep $timeout + done + + echo "Failed to execute '$@' for $maxRetries times." 1>&2 + + return 1 +} + +function GetDotNetInstallScript { + local root=$1 + local install_script="$root/dotnet-install.sh" + local install_script_url="https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh" + + if [[ ! -a "$install_script" ]]; then + mkdir -p "$root" + + echo "Downloading '$install_script_url'" + + # Use curl if available, otherwise use wget + if command -v curl > /dev/null; then + # first, try directly, if this fails we will retry with verbose logging + curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script" || { + if command -v openssl &> /dev/null; then + echo "Curl failed; dumping some information about dotnet.microsoft.com for later investigation" + echo | openssl s_client -showcerts -servername dotnet.microsoft.com -connect dotnet.microsoft.com:443 + fi + echo "Will now retry the same URL with verbose logging." + with_retries curl "$install_script_url" -sSL --verbose --retry 10 --create-dirs -o "$install_script" || { + local exit_code=$? + Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')." + ExitWithExitCode $exit_code + } + } + else + with_retries wget -v -O "$install_script" "$install_script_url" || { + local exit_code=$? + Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')." + ExitWithExitCode $exit_code + } + fi + fi + # return value + _GetDotNetInstallScript="$install_script" +} + +function InitializeBuildTool { + if [[ -n "${_InitializeBuildTool:-}" ]]; then + return + fi + + InitializeDotNetCli $restore + + # return values + _InitializeBuildTool="$_InitializeDotNetCli/dotnet" + _InitializeBuildToolCommand="msbuild" + _InitializeBuildToolFramework="net8.0" +} + +# Set RestoreNoCache as a workaround for https://github.com/NuGet/Home/issues/3116 +function GetNuGetPackageCachePath { + if [[ -z ${NUGET_PACKAGES:-} ]]; then + if [[ "$use_global_nuget_cache" == true ]]; then + export NUGET_PACKAGES="$HOME/.nuget/packages" + else + export NUGET_PACKAGES="$repo_root/.packages" + export RESTORENOCACHE=true + fi + fi + + # return value + _GetNuGetPackageCachePath=$NUGET_PACKAGES +} + +function InitializeNativeTools() { + if [[ -n "${DisableNativeToolsetInstalls:-}" ]]; then + return + fi + if grep -Fq "native-tools" $global_json_file + then + local nativeArgs="" + if [[ "$ci" == true ]]; then + nativeArgs="--installDirectory $tools_dir" + fi + "$_script_dir/init-tools-native.sh" $nativeArgs + fi +} + +function InitializeToolset { + if [[ -n "${_InitializeToolset:-}" ]]; then + return + fi + + GetNuGetPackageCachePath + + ReadGlobalVersion "Microsoft.DotNet.Arcade.Sdk" + + local toolset_version=$_ReadGlobalVersion + local toolset_location_file="$toolset_dir/$toolset_version.txt" + + if [[ -a "$toolset_location_file" ]]; then + local path=`cat "$toolset_location_file"` + if [[ -a "$path" ]]; then + # return value + _InitializeToolset="$path" + return + fi + fi + + if [[ "$restore" != true ]]; then + Write-PipelineTelemetryError -category 'InitializeToolset' "Toolset version $toolset_version has not been restored." 
+ ExitWithExitCode 2 + fi + + local proj="$toolset_dir/restore.proj" + + local bl="" + if [[ "$binary_log" == true ]]; then + bl="/bl:$log_dir/ToolsetRestore.binlog" + fi + + echo '' > "$proj" + MSBuild-Core "$proj" $bl /t:__WriteToolsetLocation /clp:ErrorsOnly\;NoSummary /p:__ToolsetLocationOutputFile="$toolset_location_file" + + local toolset_build_proj=`cat "$toolset_location_file"` + + if [[ ! -a "$toolset_build_proj" ]]; then + Write-PipelineTelemetryError -category 'Build' "Invalid toolset path: $toolset_build_proj" + ExitWithExitCode 3 + fi + + # return value + _InitializeToolset="$toolset_build_proj" +} + +function ExitWithExitCode { + if [[ "$ci" == true && "$prepare_machine" == true ]]; then + StopProcesses + fi + exit $1 +} + +function StopProcesses { + echo "Killing running build processes..." + pkill -9 "dotnet" || true + pkill -9 "vbcscompiler" || true + return 0 +} + +function MSBuild { + local args=$@ + if [[ "$pipelines_log" == true ]]; then + InitializeBuildTool + InitializeToolset + + if [[ "$ci" == true ]]; then + export NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS=20 + export NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS=20 + Write-PipelineSetVariable -name "NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS" -value "20" + Write-PipelineSetVariable -name "NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS" -value "20" + fi + + local toolset_dir="${_InitializeToolset%/*}" + # new scripts need to work with old packages, so we need to look for the old names/versions + local selectedPath= + local possiblePaths=() + possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.ArcadeLogging.dll" ) + possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll" ) + possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.ArcadeLogging.dll" ) + possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.Arcade.Sdk.dll" ) + possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.ArcadeLogging.dll" ) + possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.Arcade.Sdk.dll" ) + possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.ArcadeLogging.dll" ) + possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.Arcade.Sdk.dll" ) + for path in "${possiblePaths[@]}"; do + if [[ -f $path ]]; then + selectedPath=$path + break + fi + done + if [[ -z "$selectedPath" ]]; then + Write-PipelineTelemetryError -category 'Build' "Unable to find arcade sdk logger assembly." + ExitWithExitCode 1 + fi + args+=( "-logger:$selectedPath" ) + fi + + MSBuild-Core ${args[@]} +} + +function MSBuild-Core { + if [[ "$ci" == true ]]; then + if [[ "$binary_log" != true && "$exclude_ci_binary_log" != true ]]; then + Write-PipelineTelemetryError -category 'Build' "Binary log must be enabled in CI build, or explicitly opted-out from with the -noBinaryLog switch." + ExitWithExitCode 1 + fi + + if [[ "$node_reuse" == true ]]; then + Write-PipelineTelemetryError -category 'Build' "Node reuse must be disabled in CI build." + ExitWithExitCode 1 + fi + fi + + InitializeBuildTool + + local warnaserror_switch="" + if [[ $warn_as_error == true ]]; then + warnaserror_switch="/warnaserror" + fi + + function RunBuildTool { + export ARCADE_BUILD_TOOL_COMMAND="$_InitializeBuildTool $@" + + "$_InitializeBuildTool" "$@" || { + local exit_code=$? + # We should not Write-PipelineTaskError here because that message shows up in the build summary + # The build already logged an error, that's the reason it failed. Producing an error here only adds noise. 
+ echo "Build failed with exit code $exit_code. Check errors above." + + # When running on Azure Pipelines, override the returned exit code to avoid double logging. + if [[ "$ci" == "true" && -n ${SYSTEM_TEAMPROJECT:-} ]]; then + Write-PipelineSetResult -result "Failed" -message "msbuild execution failed." + # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error + # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error + ExitWithExitCode 0 + else + ExitWithExitCode $exit_code + fi + } + } + + RunBuildTool "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@" +} + +function GetDarc { + darc_path="$temp_dir/darc" + version="$1" + + if [[ -n "$version" ]]; then + version="--darcversion $version" + fi + + "$eng_root/common/darc-init.sh" --toolpath "$darc_path" $version +} + +ResolvePath "${BASH_SOURCE[0]}" +_script_dir=`dirname "$_ResolvePath"` + +. "$_script_dir/pipeline-logging-functions.sh" + +eng_root=`cd -P "$_script_dir/.." && pwd` +repo_root=`cd -P "$_script_dir/../.." && pwd` +repo_root="${repo_root}/" +artifacts_dir="${repo_root}artifacts" +toolset_dir="$artifacts_dir/toolset" +tools_dir="${repo_root}.tools" +log_dir="$artifacts_dir/log/$configuration" +temp_dir="$artifacts_dir/tmp/$configuration" + +global_json_file="${repo_root}global.json" +# determine if global.json contains a "runtimes" entry +global_json_has_runtimes=false +if command -v jq &> /dev/null; then + if jq -e '.tools | has("runtimes")' "$global_json_file" &> /dev/null; then + global_json_has_runtimes=true + fi +elif [[ "$(cat "$global_json_file")" =~ \"runtimes\"[[:space:]\:]*\{ ]]; then + global_json_has_runtimes=true +fi + +# HOME may not be defined in some scenarios, but it is required by NuGet +if [[ -z $HOME ]]; then + export HOME="${repo_root}artifacts/.home/" + mkdir -p "$HOME" +fi + +mkdir -p "$toolset_dir" +mkdir -p "$temp_dir" +mkdir -p "$log_dir" + +Write-PipelineSetVariable -name "Artifacts" -value "$artifacts_dir" +Write-PipelineSetVariable -name "Artifacts.Toolset" -value "$toolset_dir" +Write-PipelineSetVariable -name "Artifacts.Log" -value "$log_dir" +Write-PipelineSetVariable -name "Temp" -value "$temp_dir" +Write-PipelineSetVariable -name "TMP" -value "$temp_dir" + +# Import custom tools configuration, if present in the repo. +if [ -z "${disable_configure_toolset_import:-}" ]; then + configure_toolset_script="$eng_root/configure-toolset.sh" + if [[ -a "$configure_toolset_script" ]]; then + . "$configure_toolset_script" + fi +fi + +# TODO: https://github.com/dotnet/arcade/issues/1468 +# Temporary workaround to avoid breaking change. +# Remove once repos are updated. 
+if [[ -n "${useInstalledDotNetCli:-}" ]]; then + use_installed_dotnet_cli="$useInstalledDotNetCli" +fi diff --git a/eng/key.snk b/eng/key.snk new file mode 100644 index 0000000000000000000000000000000000000000..505d6b970d52ac52a694b63cb80327c8e2a16c9e GIT binary patch literal 596 zcmV-a0;~N80ssI2Bme+XQ$aES1ONa50098)=z??gwfaC}Y%x2$Xlwt_*`Ffd2OoU(a=yy_|`Tvc@%1^4)|Ye%H~PamI1c)E7B;Q?n^ zxP-KZKzT(_JV**aljj9{k4VRGS0;&QLN7JSx!SKZM9U9Lu=SNxLrf+teNchQsG-KU za=7vCG0A8ixp<2AeEBx?hEf6#~I7lNa&a+_KUEgBv!cvvs`0wG9jgU^;}aY%-J=J zNvY);Rqd5`6X*NiQiJ$#XtQl~ShE9q1!nGO+_O2M80 z%{uZ#K@lh+uj9RQgC+Hp5MG`Cdp2IDfLBuJguxP%Wtev23&) zKL`ub^f63XLp;T65n|$u^v58Kv=SerKH@SY9s%;1Fs#)K=Ua~5pYbWr} zRmi~5Yj7dIv!w7)*I>Gy;R5q&Rd)Np}d+B_N2>U4rx- zf9Kpk?)~eWz1FwCjNO??=0Lv z@PFF>okkD{0uW*H7no!p4f#*I9j5&6F~_8T^Z#%D|7-~W0|3c500^dE;PDdyVBUYN z`ENW{-hZu=iYfnlK$ulP{~4(*CWbD~R0J}FrQBK##bpIue zkM2x^^yV`)c2-;n2q=$>0^~<7t8r`in&(Sm9sW^})2u@J7D&osVHM%=a6c!7H&J|4 zk%d4?@?orX0Muvpg~ z&!1cOubZH+K_&!GfqYTf0P3FSA#YYWJNsP7e# z<*endh~{)j)zPu#?q7FlPulb-9S6G#r(90JxSh$erdkq)*U zzG|DOV{_5m%5nqp{!h}I!4|f_$W?gDd3?%Cr)r_s_Kd2%S80B zwd&)KsXjGDA?q}=Ef0(v*{P?MXc_XM4y91M)tepn57GpmkoIOK;7n40EgOa1qpYWw zb;HQ7_R^;YhHXVQ6msm8a;qkq?r=@+XQD^*<8|zX+ipQ7osMQ@pJ)y%YhgnlvvSKK7qxHrPtNzUbCxGg! z?I)wx{n=HyBx!SM9De1DW=jQrA?2s8EsTrX$tGUm+wrxty(PtVC!F z9iFJnw1yYR=~O1ll%1Da8?eMuAq)mLZH`I|Mi2v$Yr^rPy~2t5@Oa(G3(d&7(5gZs zW=M+0RN3EJ>8%~|IcSyTQjlDwem$9Q7f#SxapQ}iFtz>bAM?-GCOkxEe}yx@_{ zyHKWpcJ}5FxKE0mM*isj!;BIqoY6XTXeQ7Rw*SU6Eu@oBuz}017wA>Gy#$k;J@3tm6H4t~jN$%^v1h~)r* zX(+JZQYq)IljrW&3+oodhj7l49`U)qx`7ZDi#eoZ`!|m>Fkf}{K#UR7du&$+e!tV8 z<2(qoIiSaEG2z;5H9*3jAa{Mjm1Wb&MpD9Df|}yn^?tO-LQO#d{@39k-|FykMOGZXU zMKxOhe-d9^{c+SCWNwKlCf=CDgyu7?TRs8%-e0;e< z6%*g-$;q$ZzYVHPU+U^AJ_c|X}V33I7ymYKV5AP;%BKjfmy} zMNq%~Yc}70aHIipF49JbyRAVhM&qo9Gk!FC4kK&E7xj-MiB~b7$&T}3B9oM=v&@Eo zsYIGYnZm(9+w%vRXtVNty1a;0U(o(lUHV5ob_l5K&jp&V$*vB9B!^2C-Sl&`lz0Sei-yLJjV|xgZLrHk^ z>2dYsh`NR?yTk=vpHF}YKAU%ebmg5THw0`=$F3a>54Zxq(IRwpX-gGD>_n)XjB{@3 z3X^^)!!KOczfzW}L>YAiNu}#!kASz{Q(2;lig-Nd%mv=QeT#a=>G#q)#JtTkIM^EZ zrwoG7E2v&F?6EbfQ<&wXY0G!1)T;qo$_PwMajwM;($ zivy?eJYb3eo+HIVgOi#U?yK2y-^=yPScbuG-=uX&-)3YIrFG*+ zBJ+=3wDGY6byZK`i`4iT9R@E!5kgRIgcRoBA16zJ;aO7tA`h;Y+{d;(#QhXfE6qC8 z*W5Qox|$O1JVQ?F1Ky>NnxgEQ$57l zYV#T6aWPjxD?S>9lKAMXZup8hKPfh*E&x$RP(QLu^fNwuO%}9sD&7a&C5U z+-X#zdv!U%0(`}ZmS4=o7U|6WL2jiuLa7BP&@_S{Z6>Nf5gCu`l*?5U56a`c# zadB~v>|sv$5nqJ#i0rnuS0)6A&Dr2y8J-SLDAa!tL1cpnYxR$tnQOe#qJmgNq zH<-_)CG$L8;o~*2F`6BxI!FUGb{O5Jl|sls5;BBruGSzGe=$=Frqly`c~eqSVx^|{ z$b3G=Nrg_16)|7n`n#V;FA|5*h2IU8v>pK))D5wATxoAph$ceG3ap(#@$s(*WbV4s z+h4qBHgZ=1oFj$)m}_EVmlYNp@dXTR+GzgUsdLln$bhO$LaO5E-1`8jl821}UeGW# z!hjLFH~L;ed6JM`1cqxY+ozsEh02$dv0_GwWFI4hj31~x*c)VMX8E<^0Lm#2G^>IA zNX?PO5^+(K{c7Fcjeeobx*uSH1#E9WyOnC0ui;6)0HP2AUa1X{r^47T(&l6(K2+D4 zW0{&^v4nN28yC7B;dOy4T^cm88#P7fhs`87aH)7wLv6mwR#V)bxh_xEgtaHD@?hH> z)Y(s|NHGT-g{D>b`gZEpBym`iYIcWD(2H1Xl^cD|nZ|8ocP>$XFAk4b3A|uT8F?>} zsZxryC_tfxm7dXau@bCtVjismPtUg-E6w@N_z3IVwc?h5B*9N@2VwpBV3zvnL!e3T z(P|IkoueqCnWwYYtih2MVL3wNR9lGoH^n8*SKgAwvX41#Sucup@tdUg^eqUs|p zYE_5%48X|m+ClN9_KRPah~XCpKU%!@RVU}4>A8+T7gMr|*xA?|nJ4CvbCoX*xwhb9 z=Ic1gA*9n)X1UO8q5MNNr==#+MebCunE83b-YUa3fz>s;zpHYj^C&%m0Db#tC#@*J zU&*^sLS8_5M>xj>Ygky=a`1g1tQuBnScB||B214e8Vq@G^JmU1&HVMoNEVTSIe_mD zgA!*2ywv~(NuT5*!O*oCn&DK*2gJcPfRL+N+yX-XqjRpfE|%opbArz;ixX~O93`Z?3W8Nrd0_2FR|nUs{=Tz-EhKn+U+ z2!lxTm76FAud!ihBD0LTyTG%60LkWG| zkqwJ0ujb0LTkKx?a(l`4h*FPmK6s&>EEEda$a?=iA)U(GqC({-nfy{g7EQ5o5))Bw z{#OV|PmX{^n3G5o{$E1hXi3{qh87*3y+>E)`)UW>QXeFJC%rh7HG=MMy~TPJNkYHN z9!fm}Q2L;z~n)F{+&!+)O0`THuDrnrMo%_%(A|!v{CFw}OSF@nP7$9Cdv< 
z8dCKRGo?jEMNgM~!dt7?8DdJJh#2vDl!JI`k%P&s*&$k_J!NAtJrr;W}lAL0&x>im+D47U-=o+q6H0|U0>d2(A(Y07%*A=aJR zSMagg-M}mbUy{6Q>n|Ly(^j(mDek&-{meQi@yy~`QYE>Sl1NM0RWiLt)d4M>#E(~d zkw~N_Nehs-&k#Ot98X(QoW^DC6{5w?)VJ5G9bqlT!UB*pfcK@~l#{>r@$u0;69O$N zk?pa9Ye_9QIBySuQFI~{MH;03FUxe?49USKEPej=9 zJotOE9`@)|G5^`1c?YQucB8qE5^v>sn()&c1?~qFwHgmV*)=9vMtV^IA1(~#{msZ{ z^+_{@UKoPm_dKe)v!8>X_XlBfx86489b_0E7x>87^1jxEu%2QUMg&eBRu5Klh3NS| zFg_=7-p|C_rcI+42Bd7JjkyyJxCEWo;J%>YyMnuK&@@%eyfT$1gqMHw!?a#g5?M zOX0K=C^7LN=3l}%#rx{ts+C{XAmNJ*PK))gl;OnKdm~z_x9_VEVc&g_67=InKSu9< zn1Ti%E_eybRknTIqLzEkA2qm5fG0NK%W|k?e>_4}RowL#q^;o>X2sx};{kmBR!yA& zUJ37mf3WxdqKFOU+-o#CGIf?)33a)#`_!$fm+v$aNcE0Ly`P{I3!8xt9^%e%b#Gn*0=7kSEEKIPy5s0*cgVK*<)<5o>msVSci9`D1^v zaWxZzpv!gkI;lWW?*kJrFRv=oHVjpW!qd{j7?A7p{a4qV0Gr9xp6J!%*0webvj`^` zw5c&9Vv_v(!+YiB7q>_Tdyk#zSSmDuX4&0XMO5y9Z%6IQXZOez6K|1_ZexY&)`fwRqy| zW^Sy3D4h|0MVn6A!#u%i7FeXKbrU>x{$M2va47$7E$}%cS z#6)g<%f`_Tp=M^J*`DG+YcPp;emvi^1qy(S|yZT7(3G5u`w+{A>zWPE3)tOH}eUbcAU z_tn3e`$d&c(kXfzYn_#oYwjy6#S-31_iU734%b(XhA@Ff5R}>Ceb}!KN-d25Hze~a z1&+`O`lii&K285~cOF5cb-A1LHN-r(CU-}lnCE-@xNWydWKwveos*n;i8%^NP8w?| zNF(JZv`4qGvA&LV9f)BDHN~N<@g$~{zq#=Gpn|x`D(dPP8Z$lJcXt~y45T0()F;!b z=-T9ixhm#0fnZ*Ns@4-cea8_p-I#fb`XPK#Xoz!N;QgEzYnRFPd=0zw(1Z+(8^5=| zzkrpQnVGGe4W`8{cJg_9Jld4J!M>X@xND3mog9ExSoD8BpKrXVaiJD6_T-SeHRrt& z7B)7L+~ewOTdPp1OQf)%;HPOmzmM(K?J$1_1ZOa+f>4^Ylf{lTD#{i4JFcEWJ;dbK z)B!ye8fTbyR;dVK2s-&$FSsAmym~wIink%!s41c~Ko$-=O556;F3~}>Kv0P&-=egc z5ExzJq0m9VkmVZB*a!B9aRH86GxqBfl~orJG{f7z&5UZ1sghirz_w0-nsa%bBgvQ; z*`Wwra&u&iP%<#!Cpl-7MiUDSoe33?N?eim_dpVV#UNhGE!Q)$)r3b6Q9|4t8WeW> zzxv*+uY7t~6-bL>a5;8CHb`}G#21Pyw|}&4zp`8;lu4Q(Fx{e*s)_5UHPizE#?Tg~ z{0H?pAIcUb4FKhW??~&S+q0eeF9vP?RR>@|nXaFK`-PW)>O>#7$ zDy~yX~@+nnX#~v1!OmqlB>h+T7U!!t=Lx0wk z6@lK2)HUGXZvM#|`y+s%3M;D&hhoO~KNkhU1(+pJArVed1BCK!Q2BhE(@*9vC?Gyt zxT3;sphi)hy+~Eb3FpAGKS}3Q96O!xpq=k)gBigugE}PfPi_CcwAy+k9UA52J`Qm;6 z8i=47$r)f5WXL=@D{+SYNrF>(V*`IJ`-bmEmaU^FAxjVDM{4!l)%OcLT+C^JBpZ;E z^0N}wBN@Bbix6q!QiP8j94hcvcBbvt7g4P5dFRC&7tRRU6`>R%uZ?w+NFt-6X`r!I2v`SH)L8{CzYEB z0fI?FJ1^NhLixjjELvgUJ<^9<-I{;5%z`elUSOOzk-=mz4cZw4GSxbl33ea~WkA#n z)*|W2IVqbuFTCk(V>?Q3i>8%U-}7%qObm33`i&Se*igLq$MsHjige* z@9&-wWkHc0BCL2+!}U+DRx(tFBy_{6{Ry+|ZuK^i#=3fGzGoL1`?>)9&4E9sf z(2w-opde~LIAI2qqi)_?^-Mh4uzW(9P$2c;sLro#?T&4NCJo{CbL{({T({i zSKcw*w&8P5TS#DUd4WA?Nw}plI4tnthqJV#@ndT{?8>vP)AEz9p)mtl4R3dz;Wr zQGXrnRtlX!lh;m-AU1iATP}_7Z344PTUv{^MRmUy5q=6XtZI~01X)4*zGN8V7XJ;G z|D|2iI~WB}A5NBtX5GL#)efT|?UbQQ<@6j;DS-O{L>^_zaqonr6iJy0)II z18JjLOeV;`n412TYv6cDIdZXu7OM~%Xv#=R*!GYB`&QeoURlMgyD~cbACI2%ZNd zZijecSvUJ?@=C29-qBXbrjSvSQ@O9zA4-cgxObnfI@}TydJDfF! 
z%?nK?jn#gp5%}&;0iW48Wn7=Hwp>PXHm^e!0?x&u5c`Mmn2WM2<*Us;Wmm=9KiO`c zJIzOjflY4#^eU;HC<3GwliAyc4m!S0?@*Z!)_+i(@(V^i{J28Pd+Yw5PKolX9Z0O6 zx$p$}mg!^giY9VZhXyE8Y0E1<@s=y=iJ^2OX74aYPrxR1AC`32RW$dm@CKcb!4;Nk zJMd#L{A6N2Pm%}nHX3F~0#fA+w-SlNey{7JJoudY^hsYqOF?^x;5O_H)48cQmT01u zPcPSC=n zMj&9pcZ8_nZIm{r?ni0Wk|5qYl)%wE$x$vI4upb&!ph1@$Z>vrW#vBO&!Ip>L#7IKkrX>MA(NjmpW1Cs3irgo{daqH~5QC@E9$Se5A*WeK@asi|So5FDKA z%S+ajckdh?XN}WRoSmG=PuaoCW>WD8vB5m{OHG23VSV=$#?~77?>Ffm459pBz6%t=yWrJl(`_hc_7kfq_~-zNSCKI5%YELjraDRxpZxFnQCfZ-lg6!b}LTO|b6(R3h zeNQ=~qSdxY7nBpWAf@5SC^7;dEWf7Nb2m)P1D{S21Xe1*NFvn|@tDZkDEDIJ%NC5z zfsx=8jH->!TAWwf1@Nk4Oo&Z35;2PBiBQP3XtLSRW8<@g z3d(=thDyllabtZ8VcnvpQzn=4JCn`TBK|9TF)k$9Q{lAO09RJFG----+ycM*Tzcs| zkAqEv9Z!UdreT*`L;=Cr1R-*vlz_mn+40W9j(hddO_tD00y-h6wQ;>eih{HQj3u{#g2i_{(CJ)KiIk}$bx8#?<&D~3y&L)4a3C3QkX}2a`GQPb0J_NXUJIo zs76hKyN>J}Ck=q^gfix1r$+7P#bN?)7AKy&vxJ3lNlDQa77=KY!pOd^%6!HVS3@u_ zX+nXhtG*pI1u|KaE$aE#u%I(wJBU46Jyrtlti{@-NQ$NjR3c0+G(eM>f^0~ssG@`j zXmOLHKZMIzDcpC7J4BV_W7~njic*@?eqmRuy7_*S#!F$p<&DrF!h8xFKmh;aPJML2 zVV5ffgkEm{>=%={#9s()FZv;t1NI3FOF-%fNkQxh*6;=1A9gNjgsU!G z+&;VWQ_m_%GF>v7JIQHX@fS%$xLZuz>YnAVOK9u={RNf(?|RDvnvHwlr%}G* Uu!Wewf7fvkaCOB>dGoOU1B=T + + + Exe + net8.0;net9.0 + opennetty-daemon + false + + + + + + + + + + + + + + + + + + + + + diff --git a/src/OpenNetty.Daemon/Program.cs b/src/OpenNetty.Daemon/Program.cs new file mode 100644 index 0000000..42dc4c9 --- /dev/null +++ b/src/OpenNetty.Daemon/Program.cs @@ -0,0 +1,19 @@ +using System.Reactive.Linq; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; + +var builder = Host.CreateApplicationBuilder(); + +builder.Services.AddSystemd() + .AddWindowsService(); + +builder.Services.AddOpenNetty(options => +{ + var file = builder.Environment.ContentRootFileProvider.GetFileInfo("OpenNettyConfiguration.xml"); + options.ImportFromXmlConfiguration(file); + + options.AddMqttIntegration(options => options.ImportFromXmlConfiguration(file)); +}); + +var app = builder.Build(); +await app.RunAsync(); diff --git a/src/OpenNetty.Daemon/appsettings.json b/src/OpenNetty.Daemon/appsettings.json new file mode 100644 index 0000000..70cbfce --- /dev/null +++ b/src/OpenNetty.Daemon/appsettings.json @@ -0,0 +1,8 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "OpenNetty": "Information" + } + } +} diff --git a/src/OpenNetty.Mqtt/IOpenNettyMqttWorker.cs b/src/OpenNetty.Mqtt/IOpenNettyMqttWorker.cs new file mode 100644 index 0000000..90c8352 --- /dev/null +++ b/src/OpenNetty.Mqtt/IOpenNettyMqttWorker.cs @@ -0,0 +1,23 @@ +using System.Threading.Channels; +using MQTTnet; +using MQTTnet.Extensions.ManagedClient; + +namespace OpenNetty.Mqtt; + +/// +/// Represents a worker responsible for processing incoming MQTT application messages. +/// +public interface IOpenNettyMqttWorker +{ + /// + /// Processes incoming MQTT application messages. + /// + /// The MQTT client. + /// The channel reader used to iterate incoming MQTT application messages. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. 
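+ //
+ // A minimal implementation sketch (illustrative only; this is not the worker shipped by the
+ // integration) would simply drain the channel until the host shuts down:
+ //
+ //     await foreach (var message in reader.ReadAllAsync(cancellationToken))
+ //     {
+ //         // Inspect message.Topic and its payload, then forward the command to the
+ //         // appropriate OpenNetty endpoint.
+ //     }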
+ Task ProcessMessagesAsync( + IManagedMqttClient client, + ChannelReader reader, + CancellationToken cancellationToken); +} \ No newline at end of file diff --git a/src/OpenNetty.Mqtt/OpenNetty.Mqtt.csproj b/src/OpenNetty.Mqtt/OpenNetty.Mqtt.csproj new file mode 100644 index 0000000..6b59fde --- /dev/null +++ b/src/OpenNetty.Mqtt/OpenNetty.Mqtt.csproj @@ -0,0 +1,21 @@ + + + + net8.0;net9.0 + + + + + + + + + + + + + + + + + diff --git a/src/OpenNetty.Mqtt/OpenNettyMqttAttributes.cs b/src/OpenNetty.Mqtt/OpenNettyMqttAttributes.cs new file mode 100644 index 0000000..420794f --- /dev/null +++ b/src/OpenNetty.Mqtt/OpenNettyMqttAttributes.cs @@ -0,0 +1,72 @@ +namespace OpenNetty.Mqtt; + +/// +/// Exposes common attributes supported by the OpenNetty MQTT integration. +/// +public static class OpenNettyMqttAttributes +{ + /// + /// Battery. + /// + public const string Battery = "battery"; + + /// + /// Brightness. + /// + public const string Brightness = "brightness"; + + /// + /// Dimming step. + /// + public const string DimmingStep = "dimming_step"; + + /// + /// Power cut active indicator. + /// + public const string IsPowerCutActive = "is_power_cut_active"; + + /// + /// Pilot wire derogation mode. + /// + public const string PilotWireDerogationMode = "pilot_wire_derogation_mode"; + + /// + /// Pilot wire setpoint mode. + /// + public const string PilotWireSetpointMode = "pilot_wire_setpoint_mode"; + + /// + /// Rate type. + /// + public const string RateType = "rate_type"; + + /// + /// Scenario. + /// + public const string Scenario = "scenario"; + + /// + /// Smart meter indexes. + /// + public const string SmartMeterIndexes = "smart_meter_indexes"; + + /// + /// Switch state. + /// + public const string SwitchState = "switch_state"; + + /// + /// Water heater state. + /// + public const string WaterHeaterState = "water_heater_state"; + + /// + /// Water heater setpoint mode. + /// + public const string WaterHeaterSetpointMode = "water_heater_setpoint_mode"; + + /// + /// Wireless burglar alarm state. + /// + public const string WirelessBurglarAlarmState = "wireless_burglar_alarm_state"; +} diff --git a/src/OpenNetty.Mqtt/OpenNettyMqttBuilder.cs b/src/OpenNetty.Mqtt/OpenNettyMqttBuilder.cs new file mode 100644 index 0000000..2df8702 --- /dev/null +++ b/src/OpenNetty.Mqtt/OpenNettyMqttBuilder.cs @@ -0,0 +1,158 @@ +using System.ComponentModel; +using System.Xml.Linq; +using Microsoft.Extensions.FileProviders; +using MQTTnet.Client; +using MQTTnet.Formatter; + +namespace Microsoft.Extensions.DependencyInjection; + +/// +/// Exposes the necessary methods required to configure the OpenNetty MQTT services. +/// +public sealed class OpenNettyMqttBuilder +{ + /// + /// Creates a new instance of . + /// + /// The services collection. + public OpenNettyMqttBuilder(IServiceCollection services) + => Services = services ?? throw new ArgumentNullException(nameof(services)); + + /// + /// Gets the services collection. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public IServiceCollection Services { get; } + + /// + /// Amends the default OpenNetty MQTT configuration. + /// + /// The delegate used to configure the OpenNetty MQTT options. + /// This extension can be safely called multiple times. + /// The instance. + public OpenNettyMqttBuilder Configure(Action configuration) + { + ArgumentNullException.ThrowIfNull(configuration); + + Services.Configure(configuration); + + return this; + } + + /// + /// Sets the MQTT client options used by the OpenNetty MQTT integration. 
+ /// + /// The delegate used to configure the MQTT client options. + /// The instance. + public OpenNettyMqttBuilder SetClientOptions(Action configuration) + { + ArgumentNullException.ThrowIfNull(configuration); + + var builder = new MqttClientOptionsBuilder(); + configuration(builder); + + return Configure(options => options.ClientOptions = builder.Build()); + } + + /// + /// Sets the MQTT root topic dedicated to the OpenNetty MQTT integration. + /// + /// The MQTT root topic dedicated to the OpenNetty MQTT integration. + /// The instance. + public OpenNettyMqttBuilder SetRootTopic(string topic) + { + ArgumentException.ThrowIfNullOrEmpty(topic); + + return Configure(options => options.RootTopic = topic); + } + + /// + /// Imports the OpenNetty MQTT configuration from the specified . + /// + /// The file. + /// The instance. + public OpenNettyMqttBuilder ImportFromXmlConfiguration(IFileInfo file) + { + ArgumentNullException.ThrowIfNull(file); + + if (!file.Exists) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0077)); + } + + using var stream = file.CreateReadStream(); + return ImportFromXmlConfiguration(stream); + } + + /// + /// Imports the OpenNetty MQTT configuration from the specified . + /// + /// The file path. + /// The instance. + public OpenNettyMqttBuilder ImportFromXmlConfiguration(string path) + { + ArgumentException.ThrowIfNullOrEmpty(path); + + if (!File.Exists(path)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0077)); + } + + return ImportFromXmlConfiguration(XDocument.Load(path)); + } + + /// + /// Imports the OpenNetty MQTT configuration from the specified . + /// + /// The stream. + /// The instance. + public OpenNettyMqttBuilder ImportFromXmlConfiguration(Stream stream) + { + ArgumentNullException.ThrowIfNull(stream); + + return ImportFromXmlConfiguration(XDocument.Load(stream)); + } + + /// + /// Imports the OpenNetty MQTT configuration from the specified . + /// + /// The document. + /// The instance. + public OpenNettyMqttBuilder ImportFromXmlConfiguration(XDocument document) + { + ArgumentNullException.ThrowIfNull(document); + + if (document.Root?.Name != "Configuration") + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0078)); + } + + var element = document.Root.Element("Mqtt") ?? throw new InvalidOperationException(SR.FormatID0103("Mqtt")); + var builder = new MqttClientOptionsBuilder(); + + builder.WithTcpServer((string?) element.Attribute("Server") ?? throw new InvalidOperationException(SR.FormatID0104("Server"))); + builder.WithProtocolVersion(MqttProtocolVersion.V500); + + var username = (string?) element.Attribute("Username"); + var password = (string?) element.Attribute("Password"); + + if (!string.IsNullOrEmpty(username) && !string.IsNullOrEmpty(password)) + { + builder.WithCredentials(username, password); + } + + return Configure(options => options.ClientOptions = builder.Build()); + } + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object? obj) => base.Equals(obj); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => base.GetHashCode(); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override string? 
ToString() => base.ToString(); +} diff --git a/src/OpenNetty.Mqtt/OpenNettyMqttConfiguration.cs b/src/OpenNetty.Mqtt/OpenNettyMqttConfiguration.cs new file mode 100644 index 0000000..6915c44 --- /dev/null +++ b/src/OpenNetty.Mqtt/OpenNettyMqttConfiguration.cs @@ -0,0 +1,19 @@ +using System.ComponentModel; +using Microsoft.Extensions.Options; + +namespace OpenNetty.Mqtt; + +/// +/// Exposes extensions allowing to register the OpenNetty MQTT services. +/// +[EditorBrowsable(EditorBrowsableState.Never)] +public sealed class OpenNettyMqttConfiguration : IPostConfigureOptions +{ + /// + public void PostConfigure(string? name, OpenNettyMqttOptions options) + { + ArgumentNullException.ThrowIfNull(options); + + options.EndpointNameProvider ??= static endpoint => endpoint.Name?.ToLowerInvariant(); + } +} diff --git a/src/OpenNetty.Mqtt/OpenNettyMqttExtensions.cs b/src/OpenNetty.Mqtt/OpenNettyMqttExtensions.cs new file mode 100644 index 0000000..1f031e3 --- /dev/null +++ b/src/OpenNetty.Mqtt/OpenNettyMqttExtensions.cs @@ -0,0 +1,60 @@ +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Options; +using MQTTnet; +using MQTTnet.Extensions.ManagedClient; + +namespace Microsoft.Extensions.DependencyInjection; + +/// +/// Exposes extensions allowing to register the OpenNetty MQTT services. +/// +public static class OpenNettyMqttExtensions +{ + /// + /// Registers the OpenNetty MQTT services in the DI container. + /// + /// The services builder used by OpenNetty to register new services. + /// This extension can be safely called multiple times. + /// The instance. + public static OpenNettyMqttBuilder AddMqttIntegration(this OpenNettyBuilder builder) + { + ArgumentNullException.ThrowIfNull(builder); + + builder.Services.AddOptionsWithValidateOnStart(); + + builder.Services.TryAddSingleton(static provider => new MqttFactory()); + builder.Services.TryAddSingleton(static provider => + { + var factory = provider.GetRequiredService(); + return factory.CreateManagedMqttClient(); + }); + + builder.Services.TryAddEnumerable(ServiceDescriptor.Singleton< + IOpenNettyHandler, OpenNettyMqttHostedService>()); + builder.Services.TryAddEnumerable(ServiceDescriptor.Singleton< + IPostConfigureOptions, OpenNettyMqttConfiguration>()); + + builder.Services.TryAddSingleton(); + + builder.Services.AddHostedService(); + + return new OpenNettyMqttBuilder(builder.Services); + } + + /// + /// Registers the OpenNetty MQTT services in the DI container. + /// + /// The services builder used by OpenNetty to register new services. + /// The configuration delegate used to configure the core services. + /// This extension can be safely called multiple times. + /// The instance. 
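// Registration sketch for AddMqttIntegration() (assumptions: 'builder' is an OpenNettyBuilder
// obtained from the host's service collection and the broker address is a placeholder):
builder
    .AddMqttIntegration()
    .SetClientOptions(client => client.WithTcpServer("mqtt.example.local"))
    .SetRootTopic("opennetty");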
+ public static OpenNettyBuilder AddMqttIntegration(this OpenNettyBuilder builder, Action configuration) + { + ArgumentNullException.ThrowIfNull(builder); + ArgumentNullException.ThrowIfNull(configuration); + + configuration(builder.AddMqttIntegration()); + + return builder; + } +} diff --git a/src/OpenNetty.Mqtt/OpenNettyMqttHostedService.cs b/src/OpenNetty.Mqtt/OpenNettyMqttHostedService.cs new file mode 100644 index 0000000..6e790ba --- /dev/null +++ b/src/OpenNetty.Mqtt/OpenNettyMqttHostedService.cs @@ -0,0 +1,367 @@ +using System.ComponentModel; +using System.Globalization; +using System.Net.Mime; +using System.Reactive.Disposables; +using System.Reactive.Linq; +using System.Text; +using System.Text.Json.Nodes; +using System.Threading.Channels; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Options; +using MQTTnet; +using MQTTnet.Client; +using MQTTnet.Extensions.ManagedClient; +using MQTTnet.Protocol; + +namespace OpenNetty.Mqtt; + +/// +/// Contains the logic necessary to propage OpenNetty events over MQTT. +/// +[EditorBrowsable(EditorBrowsableState.Never)] +public sealed class OpenNettyMqttHostedService : BackgroundService, IOpenNettyHandler +{ + private readonly IManagedMqttClient _client; + private readonly OpenNettyEvents _events; + private readonly IOptionsMonitor _options; + private readonly IOpenNettyMqttWorker _worker; + + /// + /// Creates a new instance of the class. + /// + /// The MQTT client. + /// The OpenNetty events. + /// The OpenNetty MQTT options. + /// The OpenNetty MQTT worker. + public OpenNettyMqttHostedService( + IManagedMqttClient client, + OpenNettyEvents events, + IOptionsMonitor options, + IOpenNettyMqttWorker worker) + { + _client = client ?? throw new ArgumentNullException(nameof(client)); + _events = events ?? throw new ArgumentNullException(nameof(events)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _worker = worker ?? 
throw new ArgumentNullException(nameof(worker)); + } + + /// + async ValueTask IOpenNettyHandler.SubscribeAsync() + { + return StableCompositeAsyncDisposable.Create( + [ + await _events.BasicScenarioReported + .Where(static arguments => !string.IsNullOrEmpty(arguments.Endpoint.Name)) + .Do(arguments => ReportStringAsync(arguments.Endpoint, OpenNettyMqttAttributes.Scenario, "action")) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask), + + await _events.BatteryLevelReported + .Where(static arguments => !string.IsNullOrEmpty(arguments.Endpoint.Name)) + .Do(arguments => ReportStringAsync(arguments.Endpoint, OpenNettyMqttAttributes.Battery, + arguments.Level.ToString(CultureInfo.InvariantCulture))) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask), + + await _events.BrightnessReported + .Where(static arguments => !string.IsNullOrEmpty(arguments.Endpoint.Name)) + .Do(arguments => ReportStringAsync(arguments.Endpoint, OpenNettyMqttAttributes.Brightness, + arguments.Level.ToString(CultureInfo.InvariantCulture))) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask), + + await _events.DimmingStepReported + .Where(static arguments => !string.IsNullOrEmpty(arguments.Endpoint.Name)) + .Do(arguments => ReportStringAsync(arguments.Endpoint, OpenNettyMqttAttributes.DimmingStep, + arguments.Delta.ToString(CultureInfo.InvariantCulture))) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask), + + await _events.OnOffScenarioReported + .Where(static arguments => !string.IsNullOrEmpty(arguments.Endpoint.Name)) + .Do(arguments => ReportStringAsync(arguments.Endpoint, OpenNettyMqttAttributes.Scenario, + arguments.State is OpenNettyModels.Lighting.SwitchState.Off ? "OFF" : "ON")) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask), + + await _events.PilotWireDerogationModeReported + .Where(static arguments => !string.IsNullOrEmpty(arguments.Endpoint.Name)) + .Do(arguments => ReportStringAsync(arguments.Endpoint, OpenNettyMqttAttributes.PilotWireDerogationMode, arguments.Mode switch + { + OpenNettyModels.TemperatureControl.PilotWireMode.Comfort => arguments.Duration switch + { + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.None => "comfort", + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.FourHours => "comfort:4h", + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.EightHours => "comfort:8h", + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }, + + OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusOne => arguments.Duration switch + { + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.None => "comfort-1", + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.FourHours => "comfort-1:4h", + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.EightHours => "comfort-1:8h", + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }, + + OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusTwo => arguments.Duration switch + { + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.None => "comfort-2", + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.FourHours => "comfort-2:4h", + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.EightHours => "comfort-2:8h", + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }, + + OpenNettyModels.TemperatureControl.PilotWireMode.Eco => arguments.Duration switch + { + 
OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.None => "eco", + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.FourHours => "eco:4h", + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.EightHours => "eco:8h", + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }, + + OpenNettyModels.TemperatureControl.PilotWireMode.FrostProtection => arguments.Duration switch + { + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.None => "frost_protection", + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.FourHours => "frost_protection:4h", + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.EightHours => "frost_protection:8h", + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }, + + _ => "none" + })) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask), + + await _events.PilotWireSetpointModeReported + .Where(static arguments => !string.IsNullOrEmpty(arguments.Endpoint.Name)) + .Do(arguments => ReportStringAsync(arguments.Endpoint, OpenNettyMqttAttributes.PilotWireSetpointMode, arguments.Mode switch + { + OpenNettyModels.TemperatureControl.PilotWireMode.Comfort => "comfort", + OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusOne => "comfort-1", + OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusTwo => "comfort-2", + OpenNettyModels.TemperatureControl.PilotWireMode.Eco => "eco", + OpenNettyModels.TemperatureControl.PilotWireMode.FrostProtection => "frost_protection", + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + })) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask), + + await _events.ProgressiveScenarioReported + .Where(static arguments => !string.IsNullOrEmpty(arguments.Endpoint.Name)) + .Do(arguments => ReportJsonAsync(arguments.Endpoint, OpenNettyMqttAttributes.Scenario, new JsonObject() + { + ["scenario_type"] = "progressive", + ["duration"] = arguments.Duration.TotalSeconds + })) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask), + + await _events.SmartMeterIndexesReported + .Where(static arguments => !string.IsNullOrEmpty(arguments.Endpoint.Name)) + .Do(arguments => ReportJsonAsync(arguments.Endpoint, OpenNettyMqttAttributes.SmartMeterIndexes, new JsonObject() + { + ["base_index"] = arguments.Indexes.BaseIndex, + ["blue_index"] = arguments.Indexes.BlueIndex, + ["off_peak_index"] = arguments.Indexes.OffPeakIndex, + ["red_index"] = arguments.Indexes.RedIndex, + ["white_index"] = arguments.Indexes.WhiteIndex, + ["subscription_type"] = arguments.Indexes.SubscriptionType switch + { + OpenNettyModels.TemperatureControl.SmartMeterSubscriptionType.Base => "base", + OpenNettyModels.TemperatureControl.SmartMeterSubscriptionType.OffPeak => "off_peak", + OpenNettyModels.TemperatureControl.SmartMeterSubscriptionType.Tempo => "tempo", + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + } + })) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask), + + await _events.SmartMeterPowerCutModeReported + .Where(static arguments => !string.IsNullOrEmpty(arguments.Endpoint.Name)) + .Do(arguments => ReportStringAsync(arguments.Endpoint, OpenNettyMqttAttributes.IsPowerCutActive, + arguments.Active ? 
"1" : "0")) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask), + + await _events.SmartMeterRateTypeReported + .Where(static arguments => !string.IsNullOrEmpty(arguments.Endpoint.Name)) + .Do(arguments => ReportStringAsync(arguments.Endpoint, OpenNettyMqttAttributes.RateType, arguments.Type switch + { + OpenNettyModels.TemperatureControl.SmartMeterRateType.Peak => "peak", + OpenNettyModels.TemperatureControl.SmartMeterRateType.OffPeak => "off_peak", + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + })) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask), + + await _events.SwitchStateReported + .Where(static arguments => !string.IsNullOrEmpty(arguments.Endpoint.Name)) + .Do(arguments => ReportStringAsync(arguments.Endpoint, OpenNettyMqttAttributes.SwitchState, + arguments.State is OpenNettyModels.Lighting.SwitchState.Off ? "OFF": "ON")) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask), + + await _events.TimedScenarioReported + .Where(static arguments => !string.IsNullOrEmpty(arguments.Endpoint.Name)) + .Do(arguments => ReportJsonAsync(arguments.Endpoint, OpenNettyMqttAttributes.Scenario, new JsonObject() + { + ["scenario_type"] = "timed", + ["duration"] = arguments.Duration.TotalSeconds + })) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask), + + await _events.ToggleScenarioReported + .Where(static arguments => !string.IsNullOrEmpty(arguments.Endpoint.Name)) + .Do(arguments => ReportStringAsync(arguments.Endpoint, OpenNettyMqttAttributes.Scenario, "toggle")) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask), + + await _events.WaterHeaterSetpointModeReported + .Where(static arguments => !string.IsNullOrEmpty(arguments.Endpoint.Name)) + .Do(arguments => ReportStringAsync(arguments.Endpoint, OpenNettyMqttAttributes.WaterHeaterSetpointMode, arguments.Mode switch + { + OpenNettyModels.TemperatureControl.WaterHeaterMode.Automatic => "automatic", + OpenNettyModels.TemperatureControl.WaterHeaterMode.ForcedOff => "forced_off", + OpenNettyModels.TemperatureControl.WaterHeaterMode.ForcedOn => "forced_on", + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + })) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask), + + await _events.WaterHeaterStateReported + .Where(static arguments => !string.IsNullOrEmpty(arguments.Endpoint.Name)) + .Do(arguments => ReportStringAsync(arguments.Endpoint, OpenNettyMqttAttributes.WaterHeaterState, arguments.State switch + { + OpenNettyModels.TemperatureControl.WaterHeaterState.Idle => "idle", + OpenNettyModels.TemperatureControl.WaterHeaterState.Heating => "heating", + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + })) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask), + + await _events.WirelessBurglarAlarmStateReported + .Where(static arguments => !string.IsNullOrEmpty(arguments.Endpoint.Name)) + .Do(arguments => ReportStringAsync(arguments.Endpoint, OpenNettyMqttAttributes.WirelessBurglarAlarmState, arguments.State switch + { + OpenNettyModels.Alarm.WirelessBurglarAlarmState.Disarmed => "disarmed", + OpenNettyModels.Alarm.WirelessBurglarAlarmState.Armed => "armed", + OpenNettyModels.Alarm.WirelessBurglarAlarmState.PartiallyArmed => "partially_armed", + OpenNettyModels.Alarm.WirelessBurglarAlarmState.ExitDelayElapsed => "exit_delay_elapsed", + OpenNettyModels.Alarm.WirelessBurglarAlarmState.Triggered => "triggered", + 
OpenNettyModels.Alarm.WirelessBurglarAlarmState.EventDetected => "event_detected", + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + })) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask) + ]); + + async ValueTask ReportStringAsync(OpenNettyEndpoint endpoint, string attribute, string value) + { + var topic = GetMessageTopic(endpoint, attribute); + if (string.IsNullOrEmpty(topic)) + { + return; + } + + await _client.EnqueueAsync(new MqttApplicationMessageBuilder() + .WithPayload(value) + .WithPayloadFormatIndicator(MqttPayloadFormatIndicator.CharacterData) + .WithTopic(topic) + .Build()); + } + + async ValueTask ReportJsonAsync(OpenNettyEndpoint endpoint, string attribute, JsonNode value) + { + var topic = GetMessageTopic(endpoint, attribute); + if (string.IsNullOrEmpty(topic)) + { + return; + } + + await _client.EnqueueAsync(new MqttApplicationMessageBuilder() + .WithContentType(MediaTypeNames.Application.Json) + .WithPayload(value.ToJsonString()) + .WithPayloadFormatIndicator(MqttPayloadFormatIndicator.CharacterData) + .WithTopic(topic) + .Build()); + } + + string? GetMessageTopic(OpenNettyEndpoint endpoint, string attribute) + { + var name = _options.CurrentValue.EndpointNameProvider(endpoint); + if (string.IsNullOrEmpty(name)) + { + return null; + } + + return new StringBuilder() + .Append(_options.CurrentValue.RootTopic) + .Append('/') + .Append(name) + .Append('/') + .Append(attribute) + .ToString(); + } + } + + /// + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + if (_options.CurrentValue.ClientOptions is not MqttClientOptions options) + { + return; + } + + // Create the channel that will be used to dispatch MQTT messages to multiple handlers + // to allow for parallel processing of commands. Note: operations pointing to the same + // endpoint are always handled serially to ensure the order of commands is respected. + var channel = Channel.CreateUnbounded(new UnboundedChannelOptions + { + AllowSynchronousContinuations = false, + SingleReader = false, + SingleWriter = true + }); + + // Note: MQTTnet's ApplicationMessageReceivedAsync event is never called concurrently + // even when multiple messages are received at the same time (which guarantees that + // messages can be processed in the same order as they are received). As such, it is + // safe to set SingleWriter to true in the channel options. + _client.ApplicationMessageReceivedAsync += async (MqttApplicationMessageReceivedEventArgs arguments) => + { + await channel.Writer.WriteAsync(arguments.ApplicationMessage); + }; + + // Start the managed MQTT client. + await _client.StartAsync(new ManagedMqttClientOptions + { + ClientOptions = options + }); + + try + { + await _client.SubscribeAsync($"{_options.CurrentValue.RootTopic}/#", MqttQualityOfServiceLevel.ExactlyOnce); + + // Ask the worker to process incoming messages for this MQTT client. + await _worker.ProcessMessagesAsync(_client, channel.Reader, stoppingToken); + } + + finally + { + // Stop the managed MQTT client. + await _client.StopAsync(); + } + } +} diff --git a/src/OpenNetty.Mqtt/OpenNettyMqttOperation.cs b/src/OpenNetty.Mqtt/OpenNettyMqttOperation.cs new file mode 100644 index 0000000..7d43366 --- /dev/null +++ b/src/OpenNetty.Mqtt/OpenNettyMqttOperation.cs @@ -0,0 +1,17 @@ +namespace OpenNetty.Mqtt; + +/// +/// Represents an OpenNetty MQTT operation. +/// +public enum OpenNettyMqttOperation +{ + /// + /// Get (/get). + /// + Get = 0, + + /// + /// Set (/set). 
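// Interaction sketch from the point of view of an external MQTT client (assumptions: the
// default "opennetty" root topic, a placeholder endpoint named "kitchen" and a placeholder
// broker address; only MQTTnet calls already used by the hosted service are exercised).
var client = new MqttFactory().CreateManagedMqttClient();

await client.StartAsync(new ManagedMqttClientOptions
{
    ClientOptions = new MqttClientOptionsBuilder()
        .WithTcpServer("mqtt.example.local")
        .WithProtocolVersion(MqttProtocolVersion.V500)
        .Build()
});

// Reported values are published to "opennetty/<endpoint>/<attribute>".
await client.SubscribeAsync("opennetty/kitchen/switch_state", MqttQualityOfServiceLevel.ExactlyOnce);

// Appending "/get" asks OpenNetty to refresh an attribute; "/set" changes it.
await client.EnqueueAsync(new MqttApplicationMessageBuilder()
    .WithTopic("opennetty/kitchen/switch_state/set")
    .WithPayload("ON")
    .Build());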
+ /// + Set = 1 +} diff --git a/src/OpenNetty.Mqtt/OpenNettyMqttOptions.cs b/src/OpenNetty.Mqtt/OpenNettyMqttOptions.cs new file mode 100644 index 0000000..1f844da --- /dev/null +++ b/src/OpenNetty.Mqtt/OpenNettyMqttOptions.cs @@ -0,0 +1,28 @@ +using MQTTnet.Client; + +namespace OpenNetty.Mqtt; + +/// +/// Provides various settings needed to configure the OpenNetty MQTT services. +/// +public sealed class OpenNettyMqttOptions +{ + /// + /// Gets or sets the MQTT client options. + /// + public MqttClientOptions? ClientOptions { get; set; } + + /// + /// Gets or sets the delegate responsible for resolving and, + /// if applicable, normalizing the name associated with an endpoint. + /// + /// + /// By default, OpenNetty always lowercases the endpoint name. + /// + public Func EndpointNameProvider { get; set; } = default!; + + /// + /// Gets or sets the MQTT root topic (by default, "opennetty"). + /// + public string RootTopic { get; set; } = "opennetty"; +} diff --git a/src/OpenNetty.Mqtt/OpenNettyMqttWorker.cs b/src/OpenNetty.Mqtt/OpenNettyMqttWorker.cs new file mode 100644 index 0000000..63cd2c3 --- /dev/null +++ b/src/OpenNetty.Mqtt/OpenNettyMqttWorker.cs @@ -0,0 +1,370 @@ +using System.Globalization; +using System.Net.Mime; +using System.Reactive.Concurrency; +using System.Reactive.Linq; +using System.Runtime.CompilerServices; +using System.Text.Json.Nodes; +using System.Threading.Channels; +using MQTTnet; +using MQTTnet.Extensions.ManagedClient; +using MQTTnet.Protocol; + +namespace OpenNetty.Mqtt; + +/// +/// Represents a worker responsible for processing incoming MQTT application messages. +/// +public sealed class OpenNettyMqttWorker : IOpenNettyMqttWorker +{ + private readonly OpenNettyController _controller; + private readonly OpenNettyLogger _logger; + private readonly OpenNettyManager _manager; + + /// + /// Creates a new instance of the class. + /// + /// The OpenNetty controller. + /// The OpenNetty logger. + /// The OpenNetty manager. + public OpenNettyMqttWorker( + OpenNettyController controller, + OpenNettyLogger logger, + OpenNettyManager manager) + { + _controller = controller ?? throw new ArgumentNullException(nameof(controller)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _manager = manager ?? throw new ArgumentNullException(nameof(manager)); + } + + /// + public async Task ProcessMessagesAsync( + IManagedMqttClient client, + ChannelReader reader, + CancellationToken cancellationToken) + { + await using var subscription = await AsyncObservable.Create(observer => + TaskPoolAsyncScheduler.Default.ScheduleAsync(async cancellationToken => + { + while (!cancellationToken.IsCancellationRequested) + { + try + { + if (!await reader.WaitToReadAsync(cancellationToken)) + { + await observer.OnCompletedAsync(); + return; + } + + while (reader.TryRead(out MqttApplicationMessage? 
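// Customization sketch for the options above (assumption: 'builder' is an
// OpenNettyMqttBuilder): the default EndpointNameProvider only lowercases the endpoint
// name, so a provider that also replaces spaces with underscores can be registered as follows.
builder.Configure(options => options.EndpointNameProvider =
    static endpoint => endpoint.Name?.ToLowerInvariant().Replace(' ', '_'));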
message)) + { + await observer.OnNextAsync(message); + } + } + + catch (ChannelClosedException) + { + await observer.OnCompletedAsync(); + return; + } + + catch (Exception exception) + { + await observer.OnErrorAsync(exception); + } + } + })) + .SelectMany(async message => + { + var (name, attribute, operation) = ExtractParameters(message); + + if (string.IsNullOrEmpty(name) || + string.IsNullOrEmpty(attribute) || + operation is not (OpenNettyMqttOperation.Get or OpenNettyMqttOperation.Set) || + await _manager.FindEndpointByNameAsync(name) is not OpenNettyEndpoint endpoint) + { + return AsyncObservable.Empty<(MqttApplicationMessage Message, OpenNettyEndpoint Endpoint, string Attribute, OpenNettyMqttOperation Operation)>(); + } + + return AsyncObservable.Return((Message: message, Endpoint: endpoint, Attribute: attribute, Operation: operation.Value)); + }) + .GroupBy(static arguments => arguments.Endpoint.Name) + .Do(async group => await group + .ObserveOn(TaskPoolAsyncScheduler.Default) + .Do(async arguments => + { + var (message, endpoint, attribute, operation) = arguments; + + try + { + switch (attribute.ToLowerInvariant()) + { + case OpenNettyMqttAttributes.Brightness when operation is OpenNettyMqttOperation.Get: + _ = await _controller.GetBrightnessAsync(endpoint); + break; + + case OpenNettyMqttAttributes.Brightness when operation is OpenNettyMqttOperation.Set: + if (!ushort.TryParse(message.PayloadSegment, CultureInfo.InvariantCulture, out var level)) + { + throw new InvalidDataException(SR.GetResourceString(SR.ID0075)); + } + + await _controller.SetBrightnessAsync(endpoint, level); + break; + + case OpenNettyMqttAttributes.PilotWireDerogationMode when operation is OpenNettyMqttOperation.Get: + case OpenNettyMqttAttributes.PilotWireSetpointMode when operation is OpenNettyMqttOperation.Get: + _ = await _controller.GetPilotWireConfigurationAsync(endpoint); + break; + + case OpenNettyMqttAttributes.PilotWireDerogationMode when operation is OpenNettyMqttOperation.Set: + switch (message.ConvertPayloadToString()?.ToLowerInvariant()) + { + case "comfort": + await _controller.SetPilotWireSetpointModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.Comfort); + break; + + case "comfort-1": + await _controller.SetPilotWireSetpointModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusOne); + break; + + case "comfort-2": + await _controller.SetPilotWireSetpointModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusTwo); + break; + + case "eco": + await _controller.SetPilotWireSetpointModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.Eco); + break; + + case "frost_protection": + await _controller.SetPilotWireSetpointModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.FrostProtection); + break; + } + break; + + case OpenNettyMqttAttributes.PilotWireSetpointMode when operation is OpenNettyMqttOperation.Set: + switch (message.ConvertPayloadToString()?.ToLowerInvariant()) + { + case "none": + await _controller.CancelPilotWireDerogationModeAsync(endpoint); + break; + + case "comfort": + await _controller.SetPilotWireDerogationModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.Comfort, + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.None); + break; + + case "comfort:4h": + await _controller.SetPilotWireDerogationModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.Comfort, + 
OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.FourHours); + break; + + case "comfort:8h": + await _controller.SetPilotWireDerogationModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.Comfort, + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.EightHours); + break; + + case "comfort-1": + await _controller.SetPilotWireDerogationModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusOne, + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.None); + break; + + case "comfort-1:4h": + await _controller.SetPilotWireDerogationModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusOne, + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.FourHours); + break; + + case "comfort-1:8h": + await _controller.SetPilotWireDerogationModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusOne, + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.EightHours); + break; + + case "comfort-2": + await _controller.SetPilotWireDerogationModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusTwo, + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.None); + break; + + case "comfort-2:4h": + await _controller.SetPilotWireDerogationModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusTwo, + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.FourHours); + break; + + case "comfort-2:8h": + await _controller.SetPilotWireDerogationModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusTwo, + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.EightHours); + break; + + case "eco": + await _controller.SetPilotWireDerogationModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.Eco, + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.None); + break; + + case "eco:4h": + await _controller.SetPilotWireDerogationModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.Eco, + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.FourHours); + break; + + case "eco:8h": + await _controller.SetPilotWireDerogationModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.Eco, + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.EightHours); + break; + + case "frost_protection": + await _controller.SetPilotWireDerogationModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.FrostProtection, + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.None); + break; + + case "frost_protection:4h": + await _controller.SetPilotWireDerogationModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.FrostProtection, + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.FourHours); + break; + + case "frost_protection:8h": + await _controller.SetPilotWireDerogationModeAsync(endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode.FrostProtection, + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.EightHours); + break; + } + break; + + case OpenNettyMqttAttributes.RateType or OpenNettyMqttAttributes.IsPowerCutActive + when operation is OpenNettyMqttOperation.Get: + _ = await _controller.GetSmartMeterInformationAsync(endpoint); + break; + + case OpenNettyMqttAttributes.Scenario when operation is OpenNettyMqttOperation.Set: + switch (message.ConvertPayloadToString()?.ToLowerInvariant()) + { + case "action" when 
endpoint.HasCapability(OpenNettyCapabilities.BasicScenario): + await _controller.DispatchBasicScenarioAsync(endpoint); + break; + + case "on" when endpoint.HasCapability(OpenNettyCapabilities.OnOffScenario): + await _controller.DispatchOnOffScenarioAsync(endpoint, OpenNettyModels.Lighting.SwitchState.On); + break; + + case "off" when endpoint.HasCapability(OpenNettyCapabilities.OnOffScenario): + await _controller.DispatchOnOffScenarioAsync(endpoint, OpenNettyModels.Lighting.SwitchState.Off); + break; + } + break; + + case OpenNettyMqttAttributes.SmartMeterIndexes when operation is OpenNettyMqttOperation.Get: + _ = await _controller.GetSmartMeterIndexesAsync(endpoint); + break; + + case OpenNettyMqttAttributes.SwitchState when operation is OpenNettyMqttOperation.Get: + _ = await _controller.GetSwitchStateAsync(endpoint); + break; + + case OpenNettyMqttAttributes.SwitchState when operation is OpenNettyMqttOperation.Set: + switch (message.ConvertPayloadToString()?.ToLowerInvariant()) + { + case "on": + await _controller.SwitchOnAsync(endpoint); + break; + + case "off": + await _controller.SwitchOffAsync(endpoint); + break; + + case "toggle": + await _controller.ToggleAsync(endpoint); + break; + } + break; + + case OpenNettyMqttAttributes.WaterHeaterSetpointMode when operation is OpenNettyMqttOperation.Set: + switch (message.ConvertPayloadToString()?.ToLowerInvariant()) + { + case "forced_off": + await _controller.SetWaterHeaterSetpointModeAsync(endpoint, + OpenNettyModels.TemperatureControl.WaterHeaterMode.ForcedOff); + break; + + case "forced_on": + await _controller.SetWaterHeaterSetpointModeAsync(endpoint, + OpenNettyModels.TemperatureControl.WaterHeaterMode.ForcedOn); + break; + + case "automatic": + await _controller.SetWaterHeaterSetpointModeAsync(endpoint, + OpenNettyModels.TemperatureControl.WaterHeaterMode.Automatic); + break; + } + break; + + case OpenNettyMqttAttributes.WaterHeaterState when operation is OpenNettyMqttOperation.Get: + _ = await _controller.GetWaterHeaterStateAsync(endpoint); + break; + } + + if (!string.IsNullOrEmpty(message.ResponseTopic)) + { + await client.EnqueueAsync(new MqttApplicationMessageBuilder() + .WithCorrelationData(message.CorrelationData) + .WithTopic(message.ResponseTopic) + .Build()); + } + } + + catch (OpenNettyException exception) when (!string.IsNullOrEmpty(message.ResponseTopic)) + { + await client.EnqueueAsync(new MqttApplicationMessageBuilder() + .WithContentType(MediaTypeNames.Application.Json) + .WithCorrelationData(message.CorrelationData) + .WithPayload(new JsonObject { ["error"] = exception.Message }.ToJsonString()) + .WithPayloadFormatIndicator(MqttPayloadFormatIndicator.CharacterData) + .WithTopic(message.ResponseTopic) + .Build()); + + throw; + } + }) + .Do(_logger.UnhandledEventHandlerException) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask)) + .Retry() + .SubscribeAsync(static arguments => ValueTask.CompletedTask); + + await WaitCancellationAsync(cancellationToken); + } + + static (string? FriendlyName, string? attribute, OpenNettyMqttOperation? Operation) ExtractParameters(MqttApplicationMessage message) + => message.Topic.Split('/', StringSplitOptions.RemoveEmptyEntries) switch + { + [_, .. string[] topics, string attribute, string operation] when string.Equals(operation, "get", StringComparison.OrdinalIgnoreCase) + => (string.Join('/', topics), attribute, OpenNettyMqttOperation.Get), + + [_, .. 
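// Request/response sketch for the response-topic handling above (assumptions: placeholder
// topics and a 'client' like the one in the previous sketch; MQTTnet's WithResponseTopic
// helper is used here to set the MQTT 5 response topic read by the worker). On success the
// worker acknowledges on the response topic with the same correlation data; if an
// OpenNettyException is thrown, it publishes a JSON payload of the form {"error":"..."}.
await client.EnqueueAsync(new MqttApplicationMessageBuilder()
    .WithTopic("opennetty/kitchen/water_heater_setpoint_mode/set")
    .WithPayload("forced_on")
    .WithResponseTopic("opennetty/replies/kitchen")
    .WithCorrelationData(Guid.NewGuid().ToByteArray())
    .Build());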
string[] topics, string attribute, string operation] when string.Equals(operation, "set", StringComparison.OrdinalIgnoreCase) + => (string.Join('/', topics), attribute, OpenNettyMqttOperation.Set), + + _ => (null, null, null) + }; + + [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)] + static async Task WaitCancellationAsync(CancellationToken cancellationToken) + { + var source = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + using var registration = cancellationToken.Register(static state => ((TaskCompletionSource) state!).SetResult(), source); + await source.Task; + } +} diff --git a/src/OpenNetty/IOpenNettyHandler.cs b/src/OpenNetty/IOpenNettyHandler.cs new file mode 100644 index 0000000..b1b2315 --- /dev/null +++ b/src/OpenNetty/IOpenNettyHandler.cs @@ -0,0 +1,21 @@ +namespace OpenNetty; + +/// +/// Represents an OpenNetty handler whose lifetime is controlled by the OpenNetty hosted service. +/// +/// +/// This interface is typically used to subscribe to events before the +/// OpenNetty stack starts establishing sessions and processing events. +/// +public interface IOpenNettyHandler +{ + /// + /// Subscribes to OpenNetty events. + /// + /// + /// A that can be used to monitor the asynchronous + /// operation and whose result is used by the OpenNetty hosted service to inform + /// the handler that the subscription should be aborted and discarded. + /// + ValueTask SubscribeAsync(); +} diff --git a/src/OpenNetty/IOpenNettyPipeline.cs b/src/OpenNetty/IOpenNettyPipeline.cs new file mode 100644 index 0000000..981d816 --- /dev/null +++ b/src/OpenNetty/IOpenNettyPipeline.cs @@ -0,0 +1,29 @@ +using System.ComponentModel; + +namespace OpenNetty; + +/// +/// Represents a thread-safe notification pipeline that can be observed and whose delivery order is guaranteed. +/// +[EditorBrowsable(EditorBrowsableState.Advanced)] +public interface IOpenNettyPipeline : IAsyncObservable +{ + /// + /// Connects the so that notifications can start being processed. + /// + /// + /// A that can be used to monitor the asynchronous + /// operation and whose result is used as a signal by the OpenNetty hosted service + /// to inform the pipeline that no additional notification will be processed. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + ValueTask ConnectAsync(); + + /// + /// Publishes a new notification. + /// + /// The notification to publish. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + ValueTask PublishAsync(OpenNettyNotification notification, CancellationToken cancellationToken = default); +} \ No newline at end of file diff --git a/src/OpenNetty/IOpenNettyService.cs b/src/OpenNetty/IOpenNettyService.cs new file mode 100644 index 0000000..49f1fce --- /dev/null +++ b/src/OpenNetty/IOpenNettyService.cs @@ -0,0 +1,235 @@ +using System.Collections.Immutable; + +namespace OpenNetty; + +/// +/// Represents a low-level service that can be used to send and receive common OpenWebNet messages. +/// +public interface IOpenNettyService +{ + /// + /// Sends a dimension request and iterates the dimension values + /// returned by all the devices matching the specified address. + /// + /// The protocol. + /// The dimension. + /// The address, if applicable. + /// The media to use or to use the default media. + /// The mode to use or to use the default mode. + /// + /// The delegate called by the service to filter the returned dimensions. 
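// Illustration of the topic convention parsed by ExtractParameters above: the first segment
// is the root topic, the last two segments are the attribute and the operation ("get" or
// "set"), and everything in between forms the endpoint name (which may itself contain
// slashes). The standalone helper below mirrors that pattern purely for illustration.
static (string? Name, string? Attribute, string? Operation) ParseTopic(string topic)
    => topic.Split('/', StringSplitOptions.RemoveEmptyEntries) switch
    {
        [_, .. string[] segments, string attribute, string operation]
            => (string.Join('/', segments), attribute, operation),

        _ => (null, null, null)
    };

// ParseTopic("opennetty/ground floor/kitchen/switch_state/set")
//   -> ("ground floor/kitchen", "switch_state", "set")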
+ /// If set to , only the requested dimension is returned. + /// + /// The gateway used to send the message. + /// The transmission options to use. + /// The that can be used to abort the operation. + /// + /// An that can be used to iterate the + /// dimension values returned by all the devices matching the specified address. + /// + IAsyncEnumerable<(OpenNettyAddress Address, ImmutableArray Values)> EnumerateDimensionsAsync( + OpenNettyProtocol protocol, + OpenNettyDimension dimension, + OpenNettyAddress? address = null, + OpenNettyMedia? media = null, + OpenNettyMode? mode = null, + Func>? filter = null, + OpenNettyGateway? gateway = null, + OpenNettyTransmissionOptions options = OpenNettyTransmissionOptions.None, + CancellationToken cancellationToken = default); + + /// + /// Sends a status request and iterates the status replies + /// returned by all the devices matching the specified address. + /// + /// The protocol. + /// The category. + /// The address, if applicable. + /// The media to use or to use the default media. + /// The mode to use or to use the default mode. + /// + /// The delegate called by the service to filter the returned status replies. If set to + /// , any bus command matching the specified is returned. + /// + /// The gateway used to send the message. + /// The transmission options to use. + /// The that can be used to abort the operation. + /// + /// An that can be used to iterate the + /// status replies returned by all the devices matching the specified address. + /// + IAsyncEnumerable<(OpenNettyAddress Address, OpenNettyCommand Command)> EnumerateStatusesAsync( + OpenNettyProtocol protocol, + OpenNettyCategory category, + OpenNettyAddress? address = null, + OpenNettyMedia? media = null, + OpenNettyMode? mode = null, + Func>? filter = null, + OpenNettyGateway? gateway = null, + OpenNettyTransmissionOptions options = OpenNettyTransmissionOptions.None, + CancellationToken cancellationToken = default); + + /// + /// Executes the specified command. + /// + /// The protocol. + /// The command. + /// The address, if applicable. + /// The media to use or to use the default media. + /// The mode to use or to use the default mode. + /// The gateway used to send the message. + /// The transmission options to use. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + ValueTask ExecuteCommandAsync( + OpenNettyProtocol protocol, + OpenNettyCommand command, + OpenNettyAddress? address = null, + OpenNettyMedia? media = null, + OpenNettyMode? mode = null, + OpenNettyGateway? gateway = null, + OpenNettyTransmissionOptions options = OpenNettyTransmissionOptions.None, + CancellationToken cancellationToken = default); + + /// + /// Sends a dimension request and returns the dimension values + /// transmitted by the first device matching the specified address. + /// + /// The protocol. + /// The dimension. + /// The address, if applicable. + /// The media to use or to use the default media. + /// The mode to use or to use the default mode. + /// + /// The delegate called by the service to filter the returned dimensions. + /// If set to , only the requested dimension is returned. + /// + /// The gateway used to send the message. + /// The transmission options to use. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous operation and whose + /// result contains the dimension values returned by the first device matching the specified address. 
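// Usage sketch for EnumerateDimensionsAsync above (assumptions: 'service' is a resolved
// IOpenNettyService and the protocol/dimension values are supplied by the caller; no enum
// member names are assumed). Each matching device yields its address and dimension values.
static async Task DumpDimensionsAsync(
    IOpenNettyService service,
    OpenNettyProtocol protocol,
    OpenNettyDimension dimension,
    CancellationToken cancellationToken)
{
    await foreach (var (address, values) in service.EnumerateDimensionsAsync(
        protocol, dimension, cancellationToken: cancellationToken))
    {
        Console.WriteLine($"{address}: {string.Join(", ", values)}");
    }
}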
+ /// + ValueTask> GetDimensionAsync(OpenNettyProtocol protocol, + OpenNettyDimension dimension, + OpenNettyAddress? address = null, + OpenNettyMedia? media = null, + OpenNettyMode? mode = null, + Func>? filter = null, + OpenNettyGateway? gateway = null, + OpenNettyTransmissionOptions options = OpenNettyTransmissionOptions.None, + CancellationToken cancellationToken = default); + + /// + /// Sends a status request and returns the status reply + /// transmitted by the first device matching the specified address. + /// + /// The protocol. + /// The category. + /// The address, if applicable. + /// The media to use or to use the default media. + /// The mode to use or to use the default mode. + /// + /// The delegate called by the service to filter the returned dimensions. + /// If set to , only the requested dimension is returned. + /// + /// The gateway used to send the message. + /// The transmission options to use. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous operation and whose + /// result contains the dimension values returned by the first device matching the specified address. + /// + ValueTask GetStatusAsync( + OpenNettyProtocol protocol, + OpenNettyCategory category, + OpenNettyAddress? address = null, + OpenNettyMedia? media = null, + OpenNettyMode? mode = null, + Func>? filter = null, + OpenNettyGateway? gateway = null, + OpenNettyTransmissionOptions options = OpenNettyTransmissionOptions.None, + CancellationToken cancellationToken = default); + + /// + /// Observes all the status replies matching the specified protocol and category. + /// + /// The protocol. + /// The category. + /// The gateway from which status replies should be observed. + /// + /// An that can be used to iterate the status + /// replies returned by all the devices matching the specified protocol and category. + /// + IAsyncObservable<(OpenNettyAddress? Address, OpenNettyCommand Command)> ObserveStatusesAsync( + OpenNettyProtocol protocol, + OpenNettyCategory category, + OpenNettyGateway? gateway = null); + + /// + /// Observes all the dimensions matching the specified protocol and category. + /// + /// The protocol. + /// The category. + /// The gateway from which dimensions should be observed. + /// + /// An that can be used to iterate the dimensions + /// returned by all the devices matching the specified protocol and category. + /// + IAsyncObservable<(OpenNettyAddress? Address, OpenNettyDimension Dimension, ImmutableArray Values)> ObserveDimensionsAsync( + OpenNettyProtocol protocol, + OpenNettyCategory category, + OpenNettyGateway? gateway = null); + + /// + /// Observes all the event messages matching the specified protocol. + /// + /// The protocol. + /// The gateway from which events should be observed. + /// + /// An that can be used to iterate the event + /// messages returned by all the devices matching the specified protocol. + /// + IAsyncObservable ObserveEventsAsync( + OpenNettyProtocol protocol, + OpenNettyGateway? gateway = null); + + /// + /// Sends a raw message. + /// + /// The message. + /// The gateway used to send the message. + /// The transmission options to use. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + ValueTask SendMessageAsync( + OpenNettyMessage message, + OpenNettyGateway? 
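// Observation sketch for ObserveStatusesAsync above (assumptions: 'service' is a resolved
// IOpenNettyService and the protocol/category values are supplied by the caller; the
// SubscribeAsync extension is the same async-observable helper already used by the MQTT
// hosted service).
static async Task WatchStatusesAsync(
    IOpenNettyService service,
    OpenNettyProtocol protocol,
    OpenNettyCategory category,
    CancellationToken cancellationToken)
{
    await using var subscription = await service
        .ObserveStatusesAsync(protocol, category)
        .SubscribeAsync(arguments =>
        {
            Console.WriteLine($"{arguments.Address}: {arguments.Command}");
            return ValueTask.CompletedTask;
        });

    // Keep the subscription alive until cancellation is requested.
    await Task.Delay(Timeout.Infinite, cancellationToken);
}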
gateway = null, + OpenNettyTransmissionOptions options = OpenNettyTransmissionOptions.None, + CancellationToken cancellationToken = default); + + /// + /// Sets the specified dimension. + /// + /// The protocol. + /// The dimension. + /// The dimension values. + /// The address, if applicable. + /// The media to use or to use the default media. + /// The mode to use or to use the default mode. + /// The gateway used to send the message. + /// The transmission options to use. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + ValueTask SetDimensionAsync( + OpenNettyProtocol protocol, + OpenNettyDimension dimension, + ImmutableArray values, + OpenNettyAddress? address = null, + OpenNettyMedia? media = null, + OpenNettyMode? mode = null, + OpenNettyGateway? gateway = null, + OpenNettyTransmissionOptions options = OpenNettyTransmissionOptions.None, + CancellationToken cancellationToken = default); +} \ No newline at end of file diff --git a/src/OpenNetty/IOpenNettyWorker.cs b/src/OpenNetty/IOpenNettyWorker.cs new file mode 100644 index 0000000..a0ae693 --- /dev/null +++ b/src/OpenNetty/IOpenNettyWorker.cs @@ -0,0 +1,25 @@ +using System.ComponentModel; +using System.Threading.Channels; + +namespace OpenNetty; + +/// +/// Represents a worker responsible for processing incoming and outgoing notifications. +/// +[EditorBrowsable(EditorBrowsableState.Advanced)] +public interface IOpenNettyWorker +{ + /// + /// Processes incoming and outgoing notifications for the specified gateway. + /// + /// The gateway. + /// The channel reader used to iterate incoming notifications. + /// The channel writer used to dispatch outgoing notifications. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + Task ProcessNotificationsAsync( + OpenNettyGateway gateway, + ChannelReader reader, + ChannelWriter writer, + CancellationToken cancellationToken); +} \ No newline at end of file diff --git a/src/OpenNetty/OpenNetty.csproj b/src/OpenNetty/OpenNetty.csproj new file mode 100644 index 0000000..e9fd563 --- /dev/null +++ b/src/OpenNetty/OpenNetty.csproj @@ -0,0 +1,27 @@ + + + + net8.0;net9.0 + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/OpenNetty/OpenNettyAddress.cs b/src/OpenNetty/OpenNettyAddress.cs new file mode 100644 index 0000000..d29dd83 --- /dev/null +++ b/src/OpenNetty/OpenNettyAddress.cs @@ -0,0 +1,673 @@ +using System.Collections.Immutable; +using System.Diagnostics; +using System.Globalization; +using System.Text; +using static OpenNetty.OpenNettyConstants; + +namespace OpenNetty; + +/// +/// Represents the address of an OpenNetty message. +/// +[DebuggerDisplay("{ToString(),nq} ({Type,nq})")] +public readonly struct OpenNettyAddress : IEquatable +{ + /// + /// Creates a new instance of the structure. + /// + /// The address type. + /// The value. + public OpenNettyAddress(OpenNettyAddressType type, string value) + : this(type, value, []) + { + } + + /// + /// Creates a new instance of the structure. + /// + /// The address type. + /// The value. + /// The additional parameters, if applicable. + public OpenNettyAddress(OpenNettyAddressType type, string value, ImmutableArray parameters) + { + ArgumentNullException.ThrowIfNull(value); + + if (!Enum.IsDefined(type)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0043)); + } + + // Ensure the value only includes ASCII digits. 
+ foreach (var character in value) + { + if (!char.IsAsciiDigit(character)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0004), nameof(value)); + } + } + + // Ensure the parameters only include ASCII digits. + if (!Parameters.IsDefaultOrEmpty) + { + for (var index = 0; index < parameters.Length; index++) + { + foreach (var character in parameters[index]) + { + if (!char.IsAsciiDigit(character)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0004), nameof(value)); + } + } + } + } + + Parameters = parameters; + Type = type; + Value = value; + } + + /// + /// Gets the additional parameters associated with the address, if applicable. + /// + public ImmutableArray Parameters { get; } + + /// + /// Gets the type associated with the address. + /// + public OpenNettyAddressType Type { get; } + + /// + /// Gets the value associated with the address. + /// + public string Value { get; } + + /// + public bool Equals(OpenNettyAddress other) + { + if (Type != other.Type) + { + return false; + } + + if (!string.Equals(Value, other.Value, StringComparison.Ordinal)) + { + return false; + } + + if (!Parameters.IsDefaultOrEmpty && !other.Parameters.IsDefaultOrEmpty) + { + if (Parameters.Length != other.Parameters.Length) + { + return false; + } + + for (var index = 0; index < Parameters.Length; index++) + { + if (!string.Equals(Parameters[index], other.Parameters[index], StringComparison.Ordinal)) + { + return false; + } + } + } + + else if (Parameters.IsDefaultOrEmpty && !other.Parameters.IsDefaultOrEmpty) + { + return false; + } + + else if (!Parameters.IsDefaultOrEmpty && other.Parameters.IsDefaultOrEmpty) + { + return false; + } + + return true; + } + + /// + public override bool Equals(object? obj) => obj is OpenNettyAddress address && Equals(address); + + /// + public override int GetHashCode() + { + if (Value is null) + { + return 0; + } + + var hash = new HashCode(); + hash.Add(Type); + hash.Add(Value); + + if (!Parameters.IsDefaultOrEmpty) + { + hash.Add(Parameters.Length); + + for (var index = 0; index < Parameters.Length; index++) + { + hash.Add(Parameters[index]); + } + } + + else + { + hash.Add(0); + } + + return hash.ToHashCode(); + } + + /// + /// Computes the representation of the current address. + /// + /// The representation of the current address. + public override string ToString() + { + if (Value is null) + { + return string.Empty; + } + + if (Parameters.IsDefaultOrEmpty) + { + return Value; + } + + var builder = new StringBuilder(); + builder.Append(Value); + + for (var index = 0; index < Parameters.Length; index++) + { + builder.Append((char) Separators.Hash[0]); + builder.Append(Parameters[index]); + } + + return builder.ToString(); + } + + /// + /// Converts the address to a list of . + /// + /// The list of representing this address. + public ImmutableArray ToParameters() + { + if (Value is null) + { + return []; + } + + var builder = ImmutableArray.CreateBuilder(); + builder.Add(new OpenNettyParameter(Value)); + + if (!Parameters.IsDefaultOrEmpty) + { + for (var index = 0; index < Parameters.Length; index++) + { + builder.Add(new OpenNettyParameter(Parameters[index])); + } + } + + return builder.ToImmutable(); + } + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettyAddress left, OpenNettyAddress right) => left.Equals(right); + + /// + /// Determines whether two instances are not equal. 
+ /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. + public static bool operator !=(OpenNettyAddress left, OpenNettyAddress right) => !(left == right); + + /// + /// Creates a Zigbee address based on the specified decimal device identifier and unit. + /// + /// The decimal device identifier or to represent a general address. + /// The unit, or 0 to represent a device address that doesn't point to a specific unit. + /// A Zigbee address based on the specified device identifier and unit. + /// The identifier or unit is not valid. + public static OpenNettyAddress FromDecimalZigbeeAddress(uint? identifier, ushort unit = 0) + { + // Note: Zigbee identifiers are 4-byte long and fit exactly in an + // unsigned 32-bit integer, so a range check is not required. + + if (unit is > 99) + { + throw new ArgumentOutOfRangeException(nameof(unit), SR.GetResourceString(SR.ID0050)); + } + + if (identifier is null) + { + return unit is 0 ? + new OpenNettyAddress(OpenNettyAddressType.ZigbeeAllDevicesAllUnits, "00") : + new OpenNettyAddress(OpenNettyAddressType.ZigbeeAllDevicesSpecificUnit, unit.ToString("00", CultureInfo.InvariantCulture)); + } + + return unit is 0 ? + new OpenNettyAddress(OpenNettyAddressType.ZigbeeSpecificDeviceAllUnits, + identifier.Value.ToString(CultureInfo.InvariantCulture) + "00") : + new OpenNettyAddress(OpenNettyAddressType.ZigbeeSpecificDeviceSpecificUnit, + identifier.Value.ToString(CultureInfo.InvariantCulture) + unit.ToString("00", CultureInfo.InvariantCulture)); + } + + /// + /// Creates a Zigbee address based on the specified hexadecimal device identifier and unit. + /// + /// The hexadecimal device identifier or to represent a general address. + /// The unit, or 0 to represent a device address that doesn't point to a specific unit. + /// A Zigbee address based on the specified device identifier and unit. + /// The identifier is not a valid hexadecimal string. + public static OpenNettyAddress FromHexadecimalZigbeeAddress(string? identifier, ushort unit = 0) + { + if (string.IsNullOrEmpty(identifier)) + { + return FromDecimalZigbeeAddress(null, unit); + } + + if (!uint.TryParse(identifier, NumberStyles.HexNumber, CultureInfo.InvariantCulture, out uint result)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0051), nameof(identifier)); + } + + return FromDecimalZigbeeAddress(result, unit); + } + + /// + /// Creates a Nitoo address based on the specified device identifier and unit. + /// + /// The device identifier. + /// The unit, or 0 to represent a device address that doesn't point to a specific unit. + /// A Nitoo address based on the specified device identifier and unit. + /// The identifier or unit is not valid. + public static OpenNettyAddress FromNitooAddress(uint identifier, ushort unit = 0) + { + if (identifier > Math.Pow(2, 24)) + { + throw new ArgumentOutOfRangeException(nameof(identifier), SR.GetResourceString(SR.ID0044)); + } + + if (unit is > 15) + { + throw new ArgumentOutOfRangeException(nameof(unit), SR.GetResourceString(SR.ID0045)); + } + + return unit is 0 ? + new OpenNettyAddress(OpenNettyAddressType.NitooDevice, (identifier * 16).ToString(CultureInfo.InvariantCulture)) : + new OpenNettyAddress(OpenNettyAddressType.NitooUnit, ((identifier * 16) + unit).ToString(CultureInfo.InvariantCulture)); + } + + /// + /// Creates a SCS light point area address based on the specified area and bus extension. + /// + /// The area. 
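// Factory sketch for the Zigbee and Nitoo helpers above (identifiers and units are
// placeholders): passing a null identifier produces an "all devices" Zigbee address and a
// unit of 0 targets the device itself rather than a specific unit.
var zigbee = OpenNettyAddress.FromHexadecimalZigbeeAddress("0123ABCD", unit: 1);
var nitoo = OpenNettyAddress.FromNitooAddress(identifier: 42, unit: 2);
var broadcast = OpenNettyAddress.FromDecimalZigbeeAddress(identifier: null);

// ToString() renders the raw OpenWebNet value, e.g. nitoo.ToString() returns "674"
// (42 * 16 + 2), matching the encoding performed by FromNitooAddress above.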
+ /// The bus extension (also known as interface), or 0 to represent the private riser. + /// A SCS light point area address based on the specified area and bus extension. + /// The area or bus extension is not valid. + public static OpenNettyAddress FromScsLightPointAreaAddress(ushort area, ushort extension = 0) + { + if (area is > 10) + { + throw new ArgumentOutOfRangeException(nameof(area), SR.GetResourceString(SR.ID0046)); + } + + if (extension is > 15) + { + throw new ArgumentOutOfRangeException(nameof(extension), SR.GetResourceString(SR.ID0047)); + } + + var builder = new StringBuilder(); + + if (area is 0) + { + builder.Append("00"); + } + + else + { + builder.Append(area); + } + + return extension is not 0 ? + new OpenNettyAddress(OpenNettyAddressType.ScsLightPointArea, builder.ToString(), ["4", extension.ToString("00", CultureInfo.InvariantCulture)]) : + new OpenNettyAddress(OpenNettyAddressType.ScsLightPointArea, builder.ToString()); + } + + /// + /// Creates a SCS light point general address based on the specified bus extension. + /// + /// The bus extension (also known as interface), or 0 to represent the private riser. + /// A SCS light point general address based on the specified bus extension. + /// The bus extension is not valid. + public static OpenNettyAddress FromScsLightPointGeneralAddress(ushort extension = 0) + { + if (extension is > 15) + { + throw new ArgumentOutOfRangeException(nameof(extension), SR.GetResourceString(SR.ID0047)); + } + + return extension is not 0 ? + new OpenNettyAddress(OpenNettyAddressType.ScsLightPointGeneral, "0", ["4", extension.ToString("00", CultureInfo.InvariantCulture)]) : + new OpenNettyAddress(OpenNettyAddressType.ScsLightPointGeneral, "0"); + } + + /// + /// Creates a SCS light point group address based on the specified group and bus extension. + /// + /// The group. + /// The bus extension (also known as interface), or 0 to represent the private riser. + /// A SCS light point general address based on the specified group and bus extension. + /// The group or bus extension is not valid. + public static OpenNettyAddress FromScsLightPointGroupAddress(ushort group, ushort extension = 0) + { + if (group is < 1 or > 255) + { + throw new ArgumentOutOfRangeException(nameof(group), SR.GetResourceString(SR.ID0048)); + } + + if (extension is > 15) + { + throw new ArgumentOutOfRangeException(nameof(extension), SR.GetResourceString(SR.ID0047)); + } + + return extension is not 0 ? + new OpenNettyAddress(OpenNettyAddressType.ScsLightPointGroup, string.Empty, [group.ToString(), "4", extension.ToString("00", CultureInfo.InvariantCulture)]) : + new OpenNettyAddress(OpenNettyAddressType.ScsLightPointGroup, string.Empty, [group.ToString()]); + } + + /// + /// Creates a SCS light point point-to-point address based on the specified area, light point and bus extension. + /// + /// The area. + /// The light point. + /// The bus extension (also known as interface), or 0 to represent the private riser. + /// A SCS light point point-to-point address based on the specified area, light point and bus extension. + /// The area, light point or bus extension is not valid. 
+ public static OpenNettyAddress FromScsLightPointPointToPointAddress(ushort area, ushort point, ushort extension = 0) + { + if (area is > 10) + { + throw new ArgumentOutOfRangeException(nameof(area), SR.GetResourceString(SR.ID0046)); + } + + if (point is < 1 or > 15) + { + throw new ArgumentOutOfRangeException(nameof(point), SR.GetResourceString(SR.ID0049)); + } + + if (extension is > 15) + { + throw new ArgumentOutOfRangeException(nameof(extension), SR.GetResourceString(SR.ID0047)); + } + + var builder = new StringBuilder(); + + if (area is 0) + { + builder.Append("00"); + } + + else if (point is >= 10) + { + builder.Append(area.ToString("00", CultureInfo.InvariantCulture)); + } + + else + { + builder.Append(area); + } + + if (area is 0 or 10) + { + builder.Append(point.ToString("00", CultureInfo.InvariantCulture)); + } + + else + { + builder.Append(point); + } + + return extension is not 0 ? + new OpenNettyAddress(OpenNettyAddressType.ScsLightPointPointToPoint, builder.ToString(), ["4", extension.ToString("00", CultureInfo.InvariantCulture)]) : + new OpenNettyAddress(OpenNettyAddressType.ScsLightPointPointToPoint, builder.ToString()); + } + + /// + /// Determines whether the specified address is a Nitoo address. + /// + /// The address. + /// if the address is a Nitoo address, otherwise. + public static bool IsNitooAddress(OpenNettyAddress address) + => address.Type is OpenNettyAddressType.NitooDevice or OpenNettyAddressType.NitooUnit; + + /// + /// Determines whether the specified address is a SCS address. + /// + /// The address. + /// if the address is a SCS address, otherwise. + public static bool IsScsAddress(OpenNettyAddress address) + => address.Type is OpenNettyAddressType.ScsLightPointArea or OpenNettyAddressType.ScsLightPointGeneral or + OpenNettyAddressType.ScsLightPointGroup or OpenNettyAddressType.ScsLightPointPointToPoint; + + /// + /// Determines whether the specified address is a Zigbee address. + /// + /// The address. + /// if the address is a Zigbee address, otherwise. + public static bool IsZigbeeAddress(OpenNettyAddress address) + => address.Type is OpenNettyAddressType.ZigbeeAllDevicesAllUnits or OpenNettyAddressType.ZigbeeAllDevicesSpecificUnit or + OpenNettyAddressType.ZigbeeSpecificDeviceAllUnits or OpenNettyAddressType.ZigbeeSpecificDeviceSpecificUnit; + + /// + /// Converts the specified address to a Nitoo address. + /// + /// The address. + /// A Nitoo address based on the specified address. + /// The address doesn't represent a valid Nitoo address. + public static (uint Identifier, ushort Unit) ToNitooAddress(OpenNettyAddress address) + { + if (!IsNitooAddress(address)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0052), nameof(address)); + } + + if (!uint.TryParse(address.Value, CultureInfo.InvariantCulture, out uint value) || value > Math.Pow(2, 24)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0053), nameof(address)); + } + + return (Identifier: value / 16, Unit: (ushort) (value % 16)); + } + + /// + /// Converts the specified address to a SCS light point area address. + /// + /// The address. + /// A SCS light point area address based on the specified address. + /// The address doesn't represent a valid SCS light point area address. + public static (ushort? Extension, ushort? 
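// Round-trip sketch for the SCS point-to-point helpers (area, light point and extension
// values are placeholders): the factory above validates the ranges, and the
// ToScsLightPointPointToPointAddress helper defined further below converts the address
// back into its (extension, area, point) components.
var address = OpenNettyAddress.FromScsLightPointPointToPointAddress(area: 2, point: 5);

if (OpenNettyAddress.IsScsAddress(address))
{
    var (extension, area, point) = OpenNettyAddress.ToScsLightPointPointToPointAddress(address);
    // extension is null (private riser), area is 2 and point is 5.
}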
Area) ToScsLightPointAreaAddress(OpenNettyAddress address) + { + if (address.Type is not OpenNettyAddressType.ScsLightPointArea) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0054), nameof(address)); + } + + if (address.Value is not ("00" or "1" or "2" or "3" or "4" or "5" or "6" or "7" or "8" or "9" or "10")) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0055), nameof(address)); + } + + return address.Parameters switch + { + { IsDefaultOrEmpty: true } when ushort.TryParse(address.Value, CultureInfo.InvariantCulture, out ushort area) && area is >= 0 and <= 10 + => (Extension: null, Area: area), + + ["4", string value] when + ushort.TryParse(address.Value, CultureInfo.InvariantCulture, out ushort area) && + ushort.TryParse(value, CultureInfo.InvariantCulture, out ushort extension) && extension is >= 0 and <= 15 + => (Extension: extension, Area: area), + + _ => throw new ArgumentException(SR.GetResourceString(SR.ID0055), nameof(address)), + }; + } + + /// + /// Converts the specified address to a SCS light point general address. + /// + /// The address. + /// The bus extension, if applicable. + /// The address doesn't represent a valid SCS light point general address. + public static ushort? ToScsLightPointGeneralAddress(OpenNettyAddress address) + { + if (address.Type is not OpenNettyAddressType.ScsLightPointGeneral) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0056), nameof(address)); + } + + if (address.Value is not "0") + { + throw new ArgumentException(SR.GetResourceString(SR.ID0057), nameof(address)); + } + + return address.Parameters switch + { + { IsDefaultOrEmpty: true } => null, + + ["4", string value] when ushort.TryParse(value, CultureInfo.InvariantCulture, out ushort extension) && extension is >= 0 and <= 15 + => extension, + + _ => throw new ArgumentException(SR.GetResourceString(SR.ID0057), nameof(address)) + }; + } + + /// + /// Converts the specified address to a SCS light point group address. + /// + /// The address. + /// A SCS light point group address based on the specified address. + /// The address doesn't represent a valid SCS light point group address. + public static (ushort? Extension, ushort? Group) ToScsLightPointGroupAddress(OpenNettyAddress address) + { + if (address.Type is not OpenNettyAddressType.ScsLightPointGroup) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0058), nameof(address)); + } + + if (!string.IsNullOrEmpty(address.Value)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0059), nameof(address)); + } + + return address.Parameters switch + { + [string value] when ushort.TryParse(value, CultureInfo.InvariantCulture, out ushort group) && group is >= 1 and <= 255 + => (Extension: null, Group: group), + + [string first, "4", string third] when + ushort.TryParse(first, CultureInfo.InvariantCulture, out ushort group) && group is >= 1 and <= 255 && + ushort.TryParse(third, CultureInfo.InvariantCulture, out ushort extension) && extension is >= 0 and <= 15 + => (Extension: extension, Group: group), + + _ => throw new ArgumentException(SR.GetResourceString(SR.ID0059), nameof(address)) + }; + } + + /// + /// Converts the specified address to a SCS light point point-to-point address. + /// + /// The address. + /// A SCS light point point-to-point address based on the specified address. + /// The address doesn't represent a valid SCS light point point-to-point address. + public static (ushort? Extension, ushort? Area, ushort? 
Point) ToScsLightPointPointToPointAddress(OpenNettyAddress address) + { + if (address.Type is not OpenNettyAddressType.ScsLightPointPointToPoint) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0060), nameof(address)); + } + + return address.Parameters switch + { + { IsDefaultOrEmpty: true } when GetAreaAndLightPoint(address.Value) is { Area: ushort area, Point: ushort point } + => (Extension: null, Area: area, Point: point), + + ["4", string value] when + GetAreaAndLightPoint(address.Value) is { Area: ushort area, Point: ushort point } && + ushort.TryParse(value, CultureInfo.InvariantCulture, out ushort extension) && extension is >= 0 and <= 15 + => (Extension: extension, Area: area, Point: point), + + _ => throw new ArgumentException(SR.GetResourceString(SR.ID0061), nameof(address)) + }; + + static (ushort Area, ushort Point) GetAreaAndLightPoint(ReadOnlySpan address) => address switch + { + // A = 00; PL [01 − 15]: + ['0', '0', '0' or '1', >= '0' and <= '9'] when + ushort.TryParse(address[2..4], CultureInfo.InvariantCulture, out ushort point) && point is >= 1 and <= 15 + => (0, point), + + // A [1 − 9]; PL [1 − 9]: + [>= '1' and <= '9', >= '1' and <= '9'] when + ushort.TryParse(address[0..1], CultureInfo.InvariantCulture, out ushort area) && + ushort.TryParse(address[1..2], CultureInfo.InvariantCulture, out ushort point) + => (area, point), + + // A = 10; PL [01 − 15]: + ['1', '0', '0' or '1', >= '0' and <= '9'] when + ushort.TryParse(address[2..4], CultureInfo.InvariantCulture, out ushort point) && point is >= 1 and <= 15 + => (10, point), + + // A [01 − 09]; PL [10 − 15]: + ['0', >= '1' and <= '9', '1', >= '0' and <= '5'] when + ushort.TryParse(address[0..2], CultureInfo.InvariantCulture, out ushort area) && + ushort.TryParse(address[2..4], CultureInfo.InvariantCulture, out ushort point) && point is >= 1 and <= 15 + => (area, point), + + _ => throw new ArgumentException(SR.GetResourceString(SR.ID0061), nameof(address)) + }; + } + + /// + /// Converts the specified address to a Zigbee address. + /// + /// The address. + /// A Zigbee address based on the specified address. + /// The address doesn't represent a valid Zigbee address. + public static (uint? Identifier, ushort Unit) ToZigbeeAddress(OpenNettyAddress address) + { + if (!IsZigbeeAddress(address)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0062), nameof(address)); + } + + if (address.Value is { Length: 2 }) + { + if (!ushort.TryParse(address.Value, CultureInfo.InvariantCulture, out ushort unit)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0063), nameof(address)); + } + + return (Identifier: null, unit); + } + + else if (address.Value is { Length: > 2 }) + { + // Note: Zigbee identifiers are 4-byte long and fit exactly in an + // unsigned 32-bit integer, so a range check is not required. 
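+ // For example (illustrative value, not taken from this patch): "123456702" is split into the
+ // identifier 1234567 (all but the last two digits) and the unit 2 (the last two digits).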
+ + if (!uint.TryParse(address.Value.AsSpan()[0..^2], CultureInfo.InvariantCulture, out uint identifier) || + !ushort.TryParse(address.Value.AsSpan()[^2..], CultureInfo.InvariantCulture, out ushort unit) || unit is > 99) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0063), nameof(address)); + } + + return (identifier, unit); + } + + throw new ArgumentException(SR.GetResourceString(SR.ID0063), nameof(address)); + } +} diff --git a/src/OpenNetty/OpenNettyAddressType.cs b/src/OpenNetty/OpenNettyAddressType.cs new file mode 100644 index 0000000..8410739 --- /dev/null +++ b/src/OpenNetty/OpenNettyAddressType.cs @@ -0,0 +1,62 @@ +namespace OpenNetty; + +/// +/// Exposes common OpenNetty address types, as defined by the Nitoo and MyHome specifications. +/// +public enum OpenNettyAddressType +{ + /// + /// Unknown address. + /// + Unknown = 0, + + /// + /// Nitoo device address. + /// + NitooDevice = 1, + + /// + /// Nitoo unit address. + /// + NitooUnit = 2, + + /// + /// Zigbee "all devices, all units" address. + /// + ZigbeeAllDevicesAllUnits = 3, + + /// + /// Zigbee "all devices, specific unit" address. + /// + ZigbeeAllDevicesSpecificUnit = 4, + + /// + /// Zigbee "specific device, all units" address. + /// + ZigbeeSpecificDeviceAllUnits = 5, + + /// + /// Zigbee "specific device, specific unit" address. + /// + ZigbeeSpecificDeviceSpecificUnit = 6, + + /// + /// SCS light point point-to-point address. + /// + ScsLightPointPointToPoint = 7, + + /// + /// SCS light point group address. + /// + ScsLightPointGroup = 8, + + /// + /// SCS light point area address. + /// + ScsLightPointArea = 9, + + /// + /// SCS light point general address. + /// + ScsLightPointGeneral = 10 +} diff --git a/src/OpenNetty/OpenNettyBrand.cs b/src/OpenNetty/OpenNettyBrand.cs new file mode 100644 index 0000000..0d85beb --- /dev/null +++ b/src/OpenNetty/OpenNettyBrand.cs @@ -0,0 +1,17 @@ +namespace OpenNetty; + +/// +/// Represents the brand of an OpenNetty device. +/// +public enum OpenNettyBrand +{ + /// + /// Legrand. + /// + Legrand = 0, + + /// + /// BTicino. + /// + BTicino = 1 +} diff --git a/src/OpenNetty/OpenNettyBuilder.cs b/src/OpenNetty/OpenNettyBuilder.cs new file mode 100644 index 0000000..7fd630f --- /dev/null +++ b/src/OpenNetty/OpenNettyBuilder.cs @@ -0,0 +1,470 @@ +using System.Collections.Immutable; +using System.ComponentModel; +using System.Globalization; +using System.IO.Ports; +using System.Net; +using System.Xml.Linq; +using Microsoft.Extensions.FileProviders; + +namespace Microsoft.Extensions.DependencyInjection; + +/// +/// Exposes the necessary methods required to configure the OpenNetty services. +/// +public sealed class OpenNettyBuilder +{ + /// + /// Creates a new instance of . + /// + /// The services collection. + public OpenNettyBuilder(IServiceCollection services) + => Services = services ?? throw new ArgumentNullException(nameof(services)); + + /// + /// Gets the services collection. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public IServiceCollection Services { get; } + + /// + /// Amends the default OpenNetty configuration. + /// + /// The delegate used to configure the OpenNetty options. + /// This extension can be safely called multiple times. + /// The instance. + public OpenNettyBuilder Configure(Action configuration) + { + ArgumentNullException.ThrowIfNull(configuration); + + Services.Configure(configuration); + + return this; + } + + /// + /// Adds an endpoint to the list of registered endpoints. + /// + /// The endpoint. + /// The instance. 
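+ /// Illustrative usage (a sketch; the endpoint and gateway instances shown here are assumptions, not values defined in this file):
+ /// builder.AddEndpoint(endpoint).AddGateway(gateway);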
+ public OpenNettyBuilder AddEndpoint(OpenNettyEndpoint endpoint) + { + ArgumentNullException.ThrowIfNull(endpoint); + + return Configure(options => options.Endpoints.Add(endpoint)); + } + + /// + /// Adds multiple endpoints to the list of registered endpoints. + /// + /// The endpoints. + /// The instance. + public OpenNettyBuilder AddEndpoints(IEnumerable endpoints) + { + ArgumentNullException.ThrowIfNull(endpoints); + + return Configure(options => options.Endpoints.AddRange(endpoints)); + } + + /// + /// Adds a gateway to the list of registered gateways. + /// + /// The gateway. + /// The instance. + public OpenNettyBuilder AddGateway(OpenNettyGateway gateway) + { + ArgumentNullException.ThrowIfNull(gateway); + + return Configure(options => options.Gateways.Add(gateway)); + } + + /// + /// Adds multiple gateways to the list of registered gateways. + /// + /// The gateways. + /// The instance. + public OpenNettyBuilder AddGateways(IEnumerable gateways) + { + ArgumentNullException.ThrowIfNull(gateways); + + return Configure(options => options.Gateways.AddRange(gateways)); + } + + /// + /// Imports the OpenNetty configuration from the specified . + /// + /// The file. + /// The instance. + public OpenNettyBuilder ImportFromXmlConfiguration(IFileInfo file) + { + ArgumentNullException.ThrowIfNull(file); + + if (!file.Exists) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0077)); + } + + using var stream = file.CreateReadStream(); + return ImportFromXmlConfiguration(stream); + } + + /// + /// Imports the OpenNetty configuration from the specified . + /// + /// The file path. + /// The instance. + public OpenNettyBuilder ImportFromXmlConfiguration(string path) + { + ArgumentException.ThrowIfNullOrEmpty(path); + + if (!File.Exists(path)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0077)); + } + + return ImportFromXmlConfiguration(XDocument.Load(path)); + } + + /// + /// Imports the OpenNetty configuration from the specified . + /// + /// The stream. + /// The instance. + public OpenNettyBuilder ImportFromXmlConfiguration(Stream stream) + { + ArgumentNullException.ThrowIfNull(stream); + + return ImportFromXmlConfiguration(XDocument.Load(stream)); + } + + /// + /// Imports the OpenNetty configuration from the specified . + /// + /// The document. + /// The instance. + public OpenNettyBuilder ImportFromXmlConfiguration(XDocument document) + { + ArgumentNullException.ThrowIfNull(document); + + if (document.Root?.Name != "Configuration") + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0078)); + } + + List endpoints = []; + List gateways = []; + + foreach (var gateway in document.Root.Descendants("Gateway")) + { + if (gateway.Parent?.Name != "Device") + { + throw new NotSupportedException(SR.GetResourceString(SR.ID0080)); + } + + var device = GetEndpointDevice(gateway.Parent); + + gateways.Add((string?) gateway.Attribute("Type") switch + { + "Serial" => OpenNettyGateway.Create( + name : (string?) gateway.Attribute("Name") ?? throw new InvalidOperationException(SR.FormatID0081("Name")), + device: device, + port : new SerialPort( + portName: (string?) gateway.Attribute("Port") ?? throw new InvalidOperationException(SR.FormatID0082("Port")), + baudRate: (int?) gateway.Attribute("BaudRate") switch + { + int value => value, + + null when device.Definition.Settings.TryGetValue(OpenNettySettings.SerialPortBaudRate, out string? 
setting) + => int.Parse(setting, CultureInfo.InvariantCulture), + + null => throw new InvalidOperationException(SR.FormatID0082("BaudRate")), + }, + parity: (string?) gateway.Attribute("Parity") switch + { + "None" => Parity.None, + "Odd" => Parity.Odd, + "Even" => Parity.Even, + "Mark" => Parity.Mark, + "Space" => Parity.Space, + + null when device.Definition.Settings.TryGetValue(OpenNettySettings.SerialPortParity, out string? setting) + => setting switch + { + "None" => Parity.None, + "Odd" => Parity.Odd, + "Even" => Parity.Even, + "Mark" => Parity.Mark, + "Space" => Parity.Space, + + string value => throw new InvalidOperationException(SR.FormatID0106(value)) + }, + + null or { Length: 0 } => throw new InvalidOperationException(SR.FormatID0082("Parity")), + + string value => throw new InvalidOperationException(SR.FormatID0106(value)) + }, + dataBits: (int?) gateway.Attribute("DataBits") switch + { + int value => value, + + null when device.Definition.Settings.TryGetValue(OpenNettySettings.SerialPortDataBits, out string? setting) + => int.Parse(setting, CultureInfo.InvariantCulture), + + null => throw new InvalidOperationException(SR.FormatID0082("DataBits")), + }, + stopBits: (string?) gateway.Attribute("StopBits") switch + { + "1" => StopBits.One, + "1.5" => StopBits.OnePointFive, + "2" => StopBits.Two, + + null when device.Definition.Settings.TryGetValue(OpenNettySettings.SerialPortStopBits, out string? setting) + => setting switch + { + "1" => StopBits.One, + "1.5" => StopBits.OnePointFive, + "2" => StopBits.Two, + + string value => throw new InvalidOperationException(SR.FormatID0107(value)) + }, + + null or { Length: 0 } => throw new InvalidOperationException(SR.FormatID0082("StopBits")), + + string value => throw new InvalidOperationException(SR.FormatID0107(value)) + })), + + "Tcp" => OpenNettyGateway.Create( + name : (string?) gateway.Attribute("Name") ?? throw new InvalidOperationException(SR.FormatID0081("Name")), + device : device, + endpoint: new IPEndPoint( + address: IPAddress.Parse((string?) gateway.Attribute("Server") ?? throw new InvalidOperationException(SR.FormatID0083("Server"))), + port : (int?) gateway.Attribute("Port") ?? 20_000), + password: (string?) gateway.Attribute("Password")), + + null or { Length: 0 } => throw new InvalidOperationException(SR.FormatID0081("Type")), + + _ => throw new InvalidOperationException(SR.GetResourceString(SR.ID0084)) + }); + } + + foreach (var endpoint in document.Root.Descendants("Endpoint")) + { + // Ensure an endpoint with an identical name wasn't already added to the list of endpoints. + var name = (string?) endpoint.Attribute("Name"); + if (name is not null && endpoints.Exists(endpoint => endpoint.Name == name)) + { + throw new InvalidOperationException(SR.FormatID0085(name)); + } + + var device = endpoint.Parent?.Name == "Device" ? GetEndpointDevice(endpoint.Parent) : + endpoint.Parent?.Name == "Unit" && endpoint.Parent.Parent?.Name == "Device" ? GetEndpointDevice(endpoint.Parent.Parent) : null; + + var unit = device is not null && endpoint.Parent?.Name == "Unit" ? GetEndpointDeviceUnit(device, endpoint.Parent, + (ushort?) (uint?) endpoint.Parent.Attribute("Id") ?? throw new InvalidOperationException(SR.FormatID0086("Id"))) : null; + + var type = (string?) 
endpoint.Attribute("Type") switch + { + "Nitoo device" => OpenNettyAddressType.NitooDevice, + "Nitoo unit" => OpenNettyAddressType.NitooUnit, + "SCS light point area" => OpenNettyAddressType.ScsLightPointArea, + "SCS light point general" => OpenNettyAddressType.ScsLightPointGeneral, + "SCS light point group" => OpenNettyAddressType.ScsLightPointGroup, + "SCS light point point-to-point" => OpenNettyAddressType.ScsLightPointPointToPoint, + "Zigbee all devices, all units" => OpenNettyAddressType.ZigbeeAllDevicesAllUnits, + "Zigbee all devices, specific unit" => OpenNettyAddressType.ZigbeeAllDevicesSpecificUnit, + "Zigbee specific device, all units" => OpenNettyAddressType.ZigbeeSpecificDeviceAllUnits, + "Zigbee specific device, specific unit" => OpenNettyAddressType.ZigbeeSpecificDeviceSpecificUnit, + + null => (OpenNettyAddressType?) null, + + string value => throw new InvalidOperationException(SR.FormatID0087(value)) + }; + + // Try to infer common address types if no explicit type was specified. + type ??= device?.Definition.Protocol switch + { + // Note: gateway endpoints don't have an address attached. + _ when device is not null && device.Definition.Capabilities.Contains(OpenNettyCapabilities.OpenWebNetGateway) => null, + + OpenNettyProtocol.Nitoo when unit is not null => OpenNettyAddressType.NitooUnit, + OpenNettyProtocol.Nitoo => OpenNettyAddressType.NitooDevice, + + OpenNettyProtocol.Scs when endpoint.Attribute("Area") is not null && endpoint.Attribute("Point") is null + => OpenNettyAddressType.ScsLightPointArea, + + OpenNettyProtocol.Scs when endpoint.Attribute("Area") is null && endpoint.Attribute("Group") is null + => OpenNettyAddressType.ScsLightPointGeneral, + + OpenNettyProtocol.Scs when endpoint.Attribute("Group") is not null + => OpenNettyAddressType.ScsLightPointGroup, + + OpenNettyProtocol.Scs when endpoint.Attribute("Area") is not null && endpoint.Attribute("Point") is not null + => OpenNettyAddressType.ScsLightPointPointToPoint, + + OpenNettyProtocol.Zigbee when unit is not null => OpenNettyAddressType.ZigbeeSpecificDeviceSpecificUnit, + OpenNettyProtocol.Zigbee => OpenNettyAddressType.ZigbeeSpecificDeviceAllUnits, + + _ => throw new InvalidOperationException(SR.FormatID0088(name, "Type")) + }; + + var address = type switch + { + null => (OpenNettyAddress?) null, + + OpenNettyAddressType.NitooDevice => OpenNettyAddress.FromNitooAddress( + uint.Parse(device?.SerialNumber ?? throw new InvalidOperationException(SR.FormatID0089("SerialNumber")), CultureInfo.InvariantCulture)), + + OpenNettyAddressType.NitooUnit => OpenNettyAddress.FromNitooAddress( + identifier: uint.Parse(device?.SerialNumber ?? throw new InvalidOperationException(SR.FormatID0089("SerialNumber")), CultureInfo.InvariantCulture), + unit : (ushort?) (uint?) endpoint.Parent?.Attribute("Id") ?? throw new InvalidOperationException(SR.FormatID0090("Id"))), + + OpenNettyAddressType.ScsLightPointArea => OpenNettyAddress.FromScsLightPointAreaAddress( + area : (ushort?) (uint?) endpoint.Attribute("Area") ?? throw new InvalidOperationException(SR.FormatID0091("Area")), + extension: (ushort?) (uint?) endpoint.Attribute("Extension") ?? 0), + + OpenNettyAddressType.ScsLightPointGeneral => OpenNettyAddress.FromScsLightPointGeneralAddress( + extension: (ushort?) (uint?) endpoint.Attribute("Extension") ?? 0), + + OpenNettyAddressType.ScsLightPointGroup => OpenNettyAddress.FromScsLightPointGroupAddress( + group : (ushort?) (uint?) endpoint.Attribute("Group") ?? 
throw new InvalidOperationException(SR.FormatID0092("Group")), + extension: (ushort?) (uint?) endpoint.Attribute("Extension") ?? 0), + + OpenNettyAddressType.ScsLightPointPointToPoint => OpenNettyAddress.FromScsLightPointPointToPointAddress( + area : (ushort?) (uint?) endpoint.Attribute("Area") ?? throw new InvalidOperationException(SR.FormatID0093("Area")), + point : (ushort?) (uint?) endpoint.Attribute("Point") ?? throw new InvalidOperationException(SR.FormatID0093("Point")), + extension: (ushort?) (uint?) endpoint.Attribute("Extension") ?? 0), + + OpenNettyAddressType.ZigbeeAllDevicesAllUnits => OpenNettyAddress.FromHexadecimalZigbeeAddress(null), + + OpenNettyAddressType.ZigbeeAllDevicesSpecificUnit => OpenNettyAddress.FromHexadecimalZigbeeAddress( + identifier: null, + unit : (ushort?) (uint?) endpoint.Parent?.Attribute("Id") ?? throw new InvalidOperationException(SR.FormatID0095("Id"))), + + OpenNettyAddressType.ZigbeeSpecificDeviceAllUnits => OpenNettyAddress.FromHexadecimalZigbeeAddress( + device?.SerialNumber ?? throw new InvalidOperationException(SR.FormatID0094("SerialNumber"))), + + OpenNettyAddressType.ZigbeeSpecificDeviceSpecificUnit => OpenNettyAddress.FromHexadecimalZigbeeAddress( + identifier: device?.SerialNumber ?? throw new InvalidOperationException(SR.FormatID0094("SerialNumber")), + unit : (ushort?) (uint?) endpoint.Parent?.Attribute("Id") ?? throw new InvalidOperationException(SR.FormatID0095("Id"))), + + _ => throw new InvalidOperationException(SR.FormatID0088(name, "Type")) + }; + + var protocol = address is not null ? + (OpenNettyAddress.IsNitooAddress(address.Value) ? OpenNettyProtocol.Nitoo : + OpenNettyAddress.IsScsAddress(address.Value) ? OpenNettyProtocol.Scs : + OpenNettyAddress.IsZigbeeAddress(address.Value) ? OpenNettyProtocol.Zigbee : + throw new InvalidOperationException(SR.FormatID0088(name, "Type"))) : + device?.Definition.Protocol ?? throw new InvalidOperationException(SR.FormatID0088(name, "Type")); + + endpoints.Add(new OpenNettyEndpoint + { + Address = address, + Capabilities = GetEndpointCapabilities(endpoint), + Device = device, + Gateway = (string?) endpoint.Attribute("Gateway") is string gateway ? FindGatewayByName(gateways, gateway) : null, + Media = device?.Definition.Media, + Name = name, + Protocol = protocol, + Settings = GetSettings(endpoint), + Unit = unit + }); + } + + return Configure(options => + { + options.Endpoints.AddRange(endpoints); + options.Gateways.AddRange(gateways); + }); + + static ImmutableHashSet GetEndpointCapabilities(XElement element) => + element.Elements("Capability") + .Select(static element => (string?) element.Attribute("Name") ?? throw new InvalidOperationException(SR.FormatID0096("Name"))) + .Select(static name => new OpenNettyCapability(name)) + .ToImmutableHashSet(); + + static OpenNettyDevice GetEndpointDevice(XElement element) + { + var brand = (string?) element.Attribute("Brand"); + if (string.IsNullOrEmpty(brand)) + { + throw new InvalidOperationException(SR.FormatID0097("Brand")); + } + + var model = (string?) element.Attribute("Model"); + if (string.IsNullOrEmpty(model)) + { + throw new InvalidOperationException(SR.FormatID0097("Model")); + } + + return new OpenNettyDevice + { + Definition = OpenNettyDevices.GetDeviceByModel(Enum.Parse(brand), model) ?? + throw new InvalidOperationException(SR.FormatID0098(brand, model)), + + SerialNumber = (string?) 
element.Attribute("SerialNumber"), + Settings = GetSettings(element) + }; + } + + static OpenNettyUnit GetEndpointDeviceUnit(OpenNettyDevice device, XElement element, ushort unit) + { + var brand = (string?) element.Parent?.Attribute("Brand"); + if (string.IsNullOrEmpty(brand)) + { + throw new InvalidOperationException(SR.FormatID0097("Brand")); + } + + var model = (string?) element.Parent?.Attribute("Model"); + if (string.IsNullOrEmpty(model)) + { + throw new InvalidOperationException(SR.FormatID0097("Model")); + } + + return new() + { + Definition = OpenNettyDevices.GetUnitByModel(Enum.Parse(brand), model, unit) ?? + throw new InvalidOperationException(SR.FormatID0100(brand, model, unit)), + + Scenarios = [.. element.Elements("Scenario").Select(GetScenario)], + Settings = GetSettings(element) + }; + } + + static ImmutableDictionary GetSettings(XElement element) => + element.Elements("Setting").ToImmutableDictionary( + element => new OpenNettySetting((string?) element.Attribute("Name") ?? throw new InvalidOperationException(SR.FormatID0099("Name"))), + element => (string?) element.Attribute("Value") ?? throw new InvalidOperationException(SR.FormatID0099("Name"))); + + static OpenNettyScenario GetScenario(XElement element) => new() + { + EndpointName = (string?) element.Attribute("Endpoint") ?? throw new InvalidOperationException(SR.FormatID0101("Endpoint")), + FunctionCode = (ushort?) (uint?) element.Attribute("Function") ?? throw new InvalidOperationException(SR.FormatID0101("Function")) + }; + + static OpenNettyGateway FindGatewayByName(IReadOnlyList gateways, string name) + { + for (var index = 0; index < gateways.Count; index++) + { + var gateway = gateways[index]; + if (string.Equals(gateway.Name, name, StringComparison.OrdinalIgnoreCase)) + { + return gateway; + } + } + + throw new InvalidOperationException(SR.FormatID0102(name)); + } + } + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object? obj) => base.Equals(obj); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => base.GetHashCode(); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override string? ToString() => base.ToString(); +} diff --git a/src/OpenNetty/OpenNettyCapabilities.cs b/src/OpenNetty/OpenNettyCapabilities.cs new file mode 100644 index 0000000..80f330c --- /dev/null +++ b/src/OpenNetty/OpenNettyCapabilities.cs @@ -0,0 +1,167 @@ +namespace OpenNetty; + +/// +/// Exposes common capabilities supported by OpenNetty endpoints or devices. +/// +public static class OpenNettyCapabilities +{ + /// + /// Advanced dimming control. + /// + public static readonly OpenNettyCapability AdvancedDimmingControl = new("Advanced dimming control"); + + /// + /// Advanced dimming state. + /// + public static readonly OpenNettyCapability AdvancedDimmingState = new("Advanced dimming state"); + + /// + /// Basic dimming control. + /// + public static readonly OpenNettyCapability BasicDimmingControl = new("Basic dimming control"); + + /// + /// Basic dimming state. + /// + public static readonly OpenNettyCapability BasicDimmingState = new("Basic dimming state"); + + /// + /// Basic scenario. + /// + public static readonly OpenNettyCapability BasicScenario = new("Basic scenario"); + + /// + /// Battery. + /// + public static readonly OpenNettyCapability Battery = new("Battery"); + + /// + /// Date/time. + /// + public static readonly OpenNettyCapability DateTime = new("Date/time"); + + /// + /// Device description. 
+ /// + public static readonly OpenNettyCapability DeviceDescription = new("Device description"); + + /// + /// Dimming scenario. + /// + public static readonly OpenNettyCapability DimmingScenario = new("Dimming scenario"); + + /// + /// Memory reading. + /// + public static readonly OpenNettyCapability MemoryReading = new("Memory reading"); + + /// + /// Memory writing. + /// + public static readonly OpenNettyCapability MemoryWriting = new("Memory writing"); + + /// + /// On/off scenario. + /// + public static readonly OpenNettyCapability OnOffScenario = new("On/off scenario"); + + /// + /// On/off switching. + /// + public static readonly OpenNettyCapability OnOffSwitching = new("On/off switching"); + + /// + /// On/off switch state. + /// + public static readonly OpenNettyCapability OnOffSwitchState = new("On/off switch state"); + + /// + /// OpenWebNet gateway. + /// + public static readonly OpenNettyCapability OpenWebNetGateway = new("OpenWebNet gateway"); + + /// + /// OpenWebNet command session. + /// + public static readonly OpenNettyCapability OpenWebNetCommandSession = new("OpenWebNet command session"); + + /// + /// OpenWebNet event session. + /// + public static readonly OpenNettyCapability OpenWebNetEventSession = new("OpenWebNet event session"); + + /// + /// OpenWebNet generic session. + /// + public static readonly OpenNettyCapability OpenWebNetGenericSession = new("OpenWebNet generic session"); + + /// + /// Pilot wire heating. + /// + public static readonly OpenNettyCapability PilotWireHeating = new("Pilot wire heating"); + + /// + /// Pilot wire scenario. + /// + public static readonly OpenNettyCapability PilotWireScenario = new("Pilot wire scenario"); + + /// + /// Progressive scenario. + /// + public static readonly OpenNettyCapability ProgressiveScenario = new("Progressive scenario"); + + /// + /// Smart meter indexes. + /// + public static readonly OpenNettyCapability SmartMeterIndexes = new("Smart meter indexes"); + + /// + /// Smart meter information. + /// + public static readonly OpenNettyCapability SmartMeterInformation = new("Smart meter information"); + + /// + /// Timed scenario. + /// + public static readonly OpenNettyCapability TimedScenario = new("Timed scenario"); + + /// + /// Toggle scenario. + /// + public static readonly OpenNettyCapability ToggleScenario = new("Toggle scenario"); + + /// + /// Unit description. + /// + public static readonly OpenNettyCapability UnitDescription = new("Unit description"); + + /// + /// Uptime. + /// + public static readonly OpenNettyCapability Uptime = new("Uptime"); + + /// + /// Water heating. + /// + public static readonly OpenNettyCapability WaterHeating = new("Water heating"); + + /// + /// Wireless burglar alarm scenario. + /// + public static readonly OpenNettyCapability WirelessBurglarAlarmScenario = new("Wireless burglar alarm scenario"); + + /// + /// Wireless burglar alarm state. + /// + public static readonly OpenNettyCapability WirelessBurglarAlarmState = new("Wireless burglar alarm state"); + + /// + /// Zigbee binding. + /// + public static readonly OpenNettyCapability ZigbeeBinding = new("Zigbee binding"); + + /// + /// Zigbee supervision. 
+ /// + public static readonly OpenNettyCapability ZigbeeSupervision = new("Zigbee supervision"); +} diff --git a/src/OpenNetty/OpenNettyCapability.cs b/src/OpenNetty/OpenNettyCapability.cs new file mode 100644 index 0000000..8accfa8 --- /dev/null +++ b/src/OpenNetty/OpenNettyCapability.cs @@ -0,0 +1,54 @@ +namespace OpenNetty; + +/// +/// Represents a capability supported by an OpenNetty endpoint or device. +/// +public readonly struct OpenNettyCapability : IEquatable +{ + /// + /// Creates a new instance of the structure. + /// + /// The capability name. + public OpenNettyCapability(string name) + { + ArgumentException.ThrowIfNullOrEmpty(name); + + Name = name; + } + + /// + /// Gets the name associated with the capability. + /// + public string Name { get; } + + /// + public bool Equals(OpenNettyCapability other) => string.Equals(Name, other.Name, StringComparison.OrdinalIgnoreCase); + + /// + public override bool Equals(object? obj) => obj is OpenNettyCapability capability && Equals(capability); + + /// + public override int GetHashCode() => Name?.GetHashCode() ?? 0; + + /// + /// Computes the representation of the current capability. + /// + /// The representation of the current capability. + public override string ToString() => Name?.ToString() ?? string.Empty; + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettyCapability left, OpenNettyCapability right) => left.Equals(right); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. + public static bool operator !=(OpenNettyCapability left, OpenNettyCapability right) => !(left == right); +} diff --git a/src/OpenNetty/OpenNettyCategories.cs b/src/OpenNetty/OpenNettyCategories.cs new file mode 100644 index 0000000..ee92ead --- /dev/null +++ b/src/OpenNetty/OpenNettyCategories.cs @@ -0,0 +1,37 @@ +namespace OpenNetty; + +/// +/// Exposes common OpenNetty categories, as defined by the Nitoo and MyHome specifications. +/// +public static class OpenNettyCategories +{ + /// + /// Lighting (WHO = 1). + /// + public static readonly OpenNettyCategory Lighting = new("1"); + + /// + /// Automation (WHO = 2). + /// + public static readonly OpenNettyCategory Automation = new("2"); + + /// + /// Temperature control (WHO = 4). + /// + public static readonly OpenNettyCategory TemperatureControl = new("4"); + + /// + /// Management (WHO = 13). + /// + public static readonly OpenNettyCategory Management = new("13"); + + /// + /// Scenarios (WHO = 25). + /// + public static readonly OpenNettyCategory Scenarios = new("25"); + + /// + /// Diagnostics (WHO = 1000). + /// + public static readonly OpenNettyCategory Diagnostics = new("1000"); +} diff --git a/src/OpenNetty/OpenNettyCategory.cs b/src/OpenNetty/OpenNettyCategory.cs new file mode 100644 index 0000000..8f3ae4d --- /dev/null +++ b/src/OpenNetty/OpenNettyCategory.cs @@ -0,0 +1,212 @@ +using System.Collections.Immutable; +using System.Diagnostics; +using System.Text; +using static OpenNetty.OpenNettyConstants; + +namespace OpenNetty; + +/// +/// Represents the category of an OpenNetty message. +/// +[DebuggerDisplay("{ToString(),nq}")] +public readonly struct OpenNettyCategory : IEquatable +{ + /// + /// Creates a new instance of the structure. + /// + /// The value. 
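+ /// For illustration (based on the ToString implementation below): new OpenNettyCategory("1") renders as "1",
+ /// while new OpenNettyCategory("1", ["4", "01"]) renders as "1#4#01".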
+ public OpenNettyCategory(string value) + : this(value, []) + { + } + + /// + /// Creates a new instance of the structure. + /// + /// The value. + /// The additional parameters, if applicable. + public OpenNettyCategory(string value, ImmutableArray parameters) + { + ArgumentNullException.ThrowIfNull(value); + + // Ensure the value only includes ASCII digits. + foreach (var character in value) + { + if (!char.IsAsciiDigit(character)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0004), nameof(value)); + } + } + + // Ensure the parameters only include ASCII digits. + if (!Parameters.IsDefaultOrEmpty) + { + for (var index = 0; index < parameters.Length; index++) + { + foreach (var character in parameters[index]) + { + if (!char.IsAsciiDigit(character)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0004), nameof(value)); + } + } + } + } + + Value = value; + Parameters = parameters; + } + + /// + /// Gets the value associated with the category. + /// + public string Value { get; } + + /// + /// Gets the additional parameters associated with the category, if applicable. + /// + public ImmutableArray Parameters { get; } + + /// + public bool Equals(OpenNettyCategory other) + { + if (Value is null) + { + return other.Value is null; + } + + if (!string.Equals(Value, other.Value, StringComparison.Ordinal)) + { + return false; + } + + if (!Parameters.IsDefaultOrEmpty && !other.Parameters.IsDefaultOrEmpty) + { + if (Parameters.Length != other.Parameters.Length) + { + return false; + } + + for (var index = 0; index < Parameters.Length; index++) + { + if (!string.Equals(Parameters[index], other.Parameters[index], StringComparison.Ordinal)) + { + return false; + } + } + } + + else if (Parameters.IsDefaultOrEmpty && !other.Parameters.IsDefaultOrEmpty) + { + return false; + } + + else if (!Parameters.IsDefaultOrEmpty && other.Parameters.IsDefaultOrEmpty) + { + return false; + } + + return true; + } + + /// + public override bool Equals(object? obj) => obj is OpenNettyAddress address && Equals(address); + + /// + public override int GetHashCode() + { + if (Value is null) + { + return 0; + } + + var hash = new HashCode(); + hash.Add(Value); + + if (!Parameters.IsDefaultOrEmpty) + { + hash.Add(Parameters.Length); + + for (var index = 0; index < Parameters.Length; index++) + { + hash.Add(Parameters[index]); + } + } + + else + { + hash.Add(0); + } + + return hash.ToHashCode(); + } + + /// + /// Computes the representation of the current category. + /// + /// The representation of the current category. + public override string ToString() + { + if (Value is null) + { + return string.Empty; + } + + if (Parameters.IsDefaultOrEmpty) + { + return Value; + } + + var builder = new StringBuilder(); + builder.Append(Value); + + for (var index = 0; index < Parameters.Length; index++) + { + builder.Append((char) Separators.Hash[0]); + builder.Append(Parameters[index]); + } + + return builder.ToString(); + } + + /// + /// Converts the category to a list of . + /// + /// The list of representing this category. + public ImmutableArray ToParameters() + { + if (Value is null) + { + return []; + } + + var builder = ImmutableArray.CreateBuilder(); + builder.Add(new OpenNettyParameter(Value)); + + if (!Parameters.IsDefaultOrEmpty) + { + for (var index = 0; index < Parameters.Length; index++) + { + builder.Add(new OpenNettyParameter(Parameters[index])); + } + } + + return builder.ToImmutable(); + } + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. 
+ /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettyCategory left, OpenNettyCategory right) => left.Equals(right); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. + public static bool operator !=(OpenNettyCategory left, OpenNettyCategory right) => !(left == right); +} diff --git a/src/OpenNetty/OpenNettyCommand.cs b/src/OpenNetty/OpenNettyCommand.cs new file mode 100644 index 0000000..752db18 --- /dev/null +++ b/src/OpenNetty/OpenNettyCommand.cs @@ -0,0 +1,233 @@ +using System.Collections.Immutable; +using System.Diagnostics; +using System.Text; +using static OpenNetty.OpenNettyConstants; + +namespace OpenNetty; + +/// +/// Represents the command associated with an OpenNetty message. +/// +[DebuggerDisplay("{ToString(),nq}")] +public readonly struct OpenNettyCommand : IEquatable +{ + /// + /// Creates a new instance of the structure. + /// + /// The category. + /// The value. + public OpenNettyCommand(OpenNettyCategory category, string value) + : this(category, value, []) + { + } + + /// + /// Creates a new instance of the structure. + /// + /// The category. + /// The value. + /// The additional parameters, if applicable. + public OpenNettyCommand(OpenNettyCategory category, string value, ImmutableArray parameters) + { + ArgumentNullException.ThrowIfNull(value); + + // Ensure the value only includes ASCII digits. + foreach (var character in value) + { + if (!char.IsAsciiDigit(character)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0004), nameof(value)); + } + } + + // Ensure the parameters only include ASCII digits. + if (!Parameters.IsDefaultOrEmpty) + { + for (var index = 0; index < parameters.Length; index++) + { + foreach (var character in parameters[index]) + { + if (!char.IsAsciiDigit(character)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0004), nameof(value)); + } + } + } + } + + Category = category; + Value = value; + Parameters = parameters; + } + + /// + /// Gets the category associated with the command. + /// + public OpenNettyCategory Category { get; } + + /// + /// Gets the value associated with the command. + /// + public string Value { get; } + + /// + /// Gets the additional parameters associated with the command, if applicable. + /// + public ImmutableArray Parameters { get; } + + /// + public bool Equals(OpenNettyCommand other) + { + if (Value is null) + { + return other.Value is null; + } + + if (Category != other.Category) + { + return false; + } + + if (!string.Equals(Value, other.Value, StringComparison.Ordinal)) + { + return false; + } + + if (!Parameters.IsDefaultOrEmpty && !other.Parameters.IsDefaultOrEmpty) + { + if (Parameters.Length != other.Parameters.Length) + { + return false; + } + + for (var index = 0; index < Parameters.Length; index++) + { + if (!string.Equals(Parameters[index], other.Parameters[index], StringComparison.Ordinal)) + { + return false; + } + } + } + + else if (Parameters.IsDefaultOrEmpty && !other.Parameters.IsDefaultOrEmpty) + { + return false; + } + + else if (!Parameters.IsDefaultOrEmpty && other.Parameters.IsDefaultOrEmpty) + { + return false; + } + + return true; + } + + /// + public override bool Equals(object? 
obj) => obj is OpenNettyCommand command && Equals(command); + + /// + public override int GetHashCode() + { + if (Value is null) + { + return 0; + } + + var hash = new HashCode(); + hash.Add(Category); + hash.Add(Value); + + if (!Parameters.IsDefaultOrEmpty) + { + hash.Add(Parameters.Length); + + for (var index = 0; index < Parameters.Length; index++) + { + hash.Add(Parameters[index]); + } + } + + else + { + hash.Add(0); + } + + return hash.ToHashCode(); + } + + /// + /// Computes the representation of the current command. + /// + /// The representation of the current command. + public override string ToString() + { + if (Value is null) + { + return string.Empty; + } + + if (Parameters.IsDefaultOrEmpty) + { + return Value; + } + + var builder = new StringBuilder(); + builder.Append(Value); + + for (var index = 0; index < Parameters.Length; index++) + { + builder.Append((char) Separators.Hash[0]); + builder.Append(Parameters[index]); + } + + return builder.ToString(); + } + + /// + /// Converts the command to a list of . + /// + /// The list of representing this command. + public ImmutableArray ToParameters() + { + if (Value is null) + { + return []; + } + + var builder = ImmutableArray.CreateBuilder(); + builder.Add(new OpenNettyParameter(Value)); + + if (!Parameters.IsDefaultOrEmpty) + { + for (var index = 0; index < Parameters.Length; index++) + { + builder.Add(new OpenNettyParameter(Parameters[index])); + } + } + + return builder.ToImmutable(); + } + + /// + /// Creates a copy of the current instance with the specified parameters attached. + /// + /// The parameters. + /// A copy of the current instance with the specified parameters attached + public OpenNettyCommand WithParameters(params ImmutableArray parameters) => new(Category, Value, parameters); + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettyCommand left, OpenNettyCommand right) => left.Equals(right); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. + public static bool operator !=(OpenNettyCommand left, OpenNettyCommand right) => !(left == right); +} diff --git a/src/OpenNetty/OpenNettyCommands.cs b/src/OpenNetty/OpenNettyCommands.cs new file mode 100644 index 0000000..357a49e --- /dev/null +++ b/src/OpenNetty/OpenNettyCommands.cs @@ -0,0 +1,221 @@ +namespace OpenNetty; + +/// +/// Exposes common OpenNetty commands, as defined by the Nitoo and MyHome specifications. +/// +public static class OpenNettyCommands +{ + /// + /// Lighting commands (WHO = 1). + /// + public static class Lighting + { + /// + /// Off (WHAT = 0). + /// + public static readonly OpenNettyCommand Off = new(OpenNettyCategories.Lighting, "0"); + + /// + /// On (WHAT = 1). + /// + public static readonly OpenNettyCommand On = new(OpenNettyCategories.Lighting, "1"); + + /// + /// On, 20% (WHAT = 2). + /// + public static readonly OpenNettyCommand On20 = new(OpenNettyCategories.Lighting, "2"); + + /// + /// On, 30% (WHAT = 3). + /// + public static readonly OpenNettyCommand On30 = new(OpenNettyCategories.Lighting, "3"); + + /// + /// On, 40% (WHAT = 4). + /// + public static readonly OpenNettyCommand On40 = new(OpenNettyCategories.Lighting, "4"); + + /// + /// On, 50% (WHAT = 5). 
+ /// + public static readonly OpenNettyCommand On50 = new(OpenNettyCategories.Lighting, "5"); + + /// + /// On, 60% (WHAT = 6). + /// + public static readonly OpenNettyCommand On60 = new(OpenNettyCategories.Lighting, "6"); + + /// + /// On, 70% (WHAT = 7). + /// + public static readonly OpenNettyCommand On70 = new(OpenNettyCategories.Lighting, "7"); + + /// + /// On, 80% (WHAT = 8). + /// + public static readonly OpenNettyCommand On80 = new(OpenNettyCategories.Lighting, "8"); + + /// + /// On, 90% (WHAT = 9). + /// + public static readonly OpenNettyCommand On90 = new(OpenNettyCategories.Lighting, "9"); + + /// + /// On, 100% (WHAT = 10). + /// + public static readonly OpenNettyCommand On100 = new(OpenNettyCategories.Lighting, "10"); + + /// + /// Toggle (WHAT = 32). + /// + public static readonly OpenNettyCommand Toggle = new(OpenNettyCategories.Lighting, "32"); + + /// + /// Dim stop (WHAT = 38). + /// + public static readonly OpenNettyCommand DimStop = new(OpenNettyCategories.Lighting, "38"); + } + + /// + /// Temperature control commands (WHO = 4). + /// + public static class TemperatureControl + { + /// + /// Wire pilot setpoint mode (WHAT = 50). + /// + /// + /// Note: this command requires specifying additional parameters. + /// + public static readonly OpenNettyCommand WirePilotSetpointMode = new(OpenNettyCategories.TemperatureControl, "50"); + + /// + /// Wire pilot derogation mode (WHAT = 51). + /// + /// + /// Note: this command requires specifying additional parameters. + /// + public static readonly OpenNettyCommand WirePilotDerogationMode = new(OpenNettyCategories.TemperatureControl, "51"); + + /// + /// Cancel wire pilot derogation mode (WHAT = 52). + /// + public static readonly OpenNettyCommand CancelWirePilotDerogationMode = new(OpenNettyCategories.TemperatureControl, "52"); + } + + /// + /// Management commands (WHO = 13). + /// + public static class Management + { + /// + /// Battery weak (WHAT = 24). + /// + public static readonly OpenNettyCommand BatteryWeak = new(OpenNettyCategories.Management, "24"); + + /// + /// Supervisor (WHAT = 66). + /// + public static readonly OpenNettyCommand Supervisor = new(OpenNettyCategories.Management, "66"); + + /// + /// Supervisor remove (WHAT = 67). + /// + public static readonly OpenNettyCommand SupervisorRemove = new(OpenNettyCategories.Management, "67"); + } + + /// + /// Scenario commands (WHO = 25). + /// + public static class Scenario + { + /// + /// Action (WHAT = 11). + /// + public static readonly OpenNettyCommand Action = new(OpenNettyCategories.Scenarios, "11"); + + /// + /// Stop action (WHAT = 16). + /// + public static readonly OpenNettyCommand StopAction = new(OpenNettyCategories.Scenarios, "16"); + + /// + /// Action for time (WHAT = 17). + /// + public static readonly OpenNettyCommand ActionForTime = new(OpenNettyCategories.Scenarios, "17"); + + /// + /// Action in time (WHAT = 18). + /// + public static readonly OpenNettyCommand ActionInTime = new(OpenNettyCategories.Scenarios, "18"); + + /// + /// Short pressure (WHAT = 21). + /// + public static readonly OpenNettyCommand ShortPressure = new(OpenNettyCategories.Scenarios, "21"); + + /// + /// Binding request (WHAT = 33). + /// + public static readonly OpenNettyCommand BindingRequest = new(OpenNettyCategories.Scenarios, "33"); + + /// + /// Unbinding request (WHAT = 34). + /// + public static readonly OpenNettyCommand UnbindingRequest = new(OpenNettyCategories.Scenarios, "34"); + + /// + /// Open binding (WHAT = 35). 
+ /// + public static readonly OpenNettyCommand OpenBinding = new(OpenNettyCategories.Scenarios, "35"); + + /// + /// Close binding (WHAT = 36). + /// + public static readonly OpenNettyCommand CloseBinding = new(OpenNettyCategories.Scenarios, "36"); + } + + /// + /// Diagnostics commands (WHO = 1000). + /// + public static class Diagnostics + { + /// + /// Open learning (WHAT = 61). + /// + /// + /// Note: this command requires specifying additional parameters. + /// + public static readonly OpenNettyCommand OpenLearning = new(OpenNettyCategories.Diagnostics, "61"); + + /// + /// Close learning (WHAT = 62). + /// + public static readonly OpenNettyCommand CloseLearning = new(OpenNettyCategories.Diagnostics, "62"); + + /// + /// Address erase (WHAT = 63). + /// + public static readonly OpenNettyCommand AddressErase = new(OpenNettyCategories.Diagnostics, "63"); + + /// + /// Memory reset (WHAT = 64). + /// + public static readonly OpenNettyCommand MemoryReset = new(OpenNettyCategories.Diagnostics, "64"); + + /// + /// Memory read (WHAT = 66). + /// + public static readonly OpenNettyCommand MemoryRead = new(OpenNettyCategories.Diagnostics, "66"); + + /// + /// Valid action (WHAT = 72). + /// + public static readonly OpenNettyCommand ValidAction = new(OpenNettyCategories.Diagnostics, "72"); + + /// + /// Invalid action (WHAT = 73). + /// + public static readonly OpenNettyCommand InvalidAction = new(OpenNettyCategories.Diagnostics, "73"); + } +} diff --git a/src/OpenNetty/OpenNettyConfiguration.cs b/src/OpenNetty/OpenNettyConfiguration.cs new file mode 100644 index 0000000..579299c --- /dev/null +++ b/src/OpenNetty/OpenNettyConfiguration.cs @@ -0,0 +1,29 @@ +using System.ComponentModel; +using Microsoft.Extensions.Options; + +namespace OpenNetty; + +/// +/// Contains the methods required to ensure that the OpenNetty configuration is valid. +/// +[EditorBrowsable(EditorBrowsableState.Never)] +public sealed class OpenNettyConfiguration : IValidateOptions +{ + /// + public ValidateOptionsResult Validate(string? name, OpenNettyOptions options) + { + ArgumentNullException.ThrowIfNull(options); + + foreach (var endpoint in options.Endpoints) + { + if (!string.IsNullOrEmpty(endpoint.Name) && + (endpoint.Name.Contains('+', StringComparison.OrdinalIgnoreCase) || + endpoint.Name.Contains('*', StringComparison.OrdinalIgnoreCase))) + { + return ValidateOptionsResult.Fail(SR.GetResourceString(SR.ID2000)); + } + } + + return ValidateOptionsResult.Success; + } +} diff --git a/src/OpenNetty/OpenNettyConnection.cs b/src/OpenNetty/OpenNettyConnection.cs new file mode 100644 index 0000000..1290a1e --- /dev/null +++ b/src/OpenNetty/OpenNettyConnection.cs @@ -0,0 +1,306 @@ +using System.IO.Ports; +using System.Net; +using System.Net.Sockets; + +namespace OpenNetty; + +/// +/// Represents a raw connection to an OpenWebNet gateway. +/// +public abstract class OpenNettyConnection : IAsyncDisposable +{ + /// + /// Gets the type of connection. + /// + public abstract OpenNettyConnectionType Type { get; } + + /// + /// Releases the connection. + /// + /// A that can be used to monitor the asynchronous operation. + public abstract ValueTask DisposeAsync(); + + /// + /// Waits until a new frame is received from the gateway. + /// + /// The that can be used to abort the operation. + /// Note: concurrent calls to this API are not allowed. + /// + /// A that can be used to monitor the asynchronous + /// operation and whose result returns the frame received from the gateway. 
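+ /// Illustrative usage (a sketch; the gateway and cancellationToken variables are assumed to be defined by the caller):
+ /// await using var connection = await OpenNettyConnection.CreateAsync(gateway);
+ /// var frame = await connection.ReceiveAsync(cancellationToken);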
+ /// + public abstract ValueTask ReceiveAsync(CancellationToken cancellationToken); + + /// + /// Sends the specified frame to the gateway. + /// + /// The frame. + /// The that can be used to abort the operation. + /// Note: concurrent calls to this API are not allowed. + /// A that can be used to monitor the asynchronous operation. + public abstract ValueTask SendAsync(OpenNettyFrame frame, CancellationToken cancellationToken); + + /// + /// Initializes the connection. + /// + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + protected internal abstract ValueTask InitializeAsync(CancellationToken cancellationToken); + + /// + /// Creates a new connection to the specified gateway. + /// + /// The gateway. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous + /// operation and whose result returns the created connection. + /// + public static ValueTask CreateAsync(OpenNettyGateway gateway, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(gateway); + + return gateway.ConnectionType switch + { + OpenNettyConnectionType.Serial => CreateSerialConnectionAsync(gateway.SerialPort ?? + throw new InvalidOperationException(SR.GetResourceString(SR.ID0070)), cancellationToken), + + OpenNettyConnectionType.Tcp => CreateTcpConnectionAsync(gateway.IPEndpoint ?? + throw new InvalidOperationException(SR.GetResourceString(SR.ID0071)), cancellationToken), + + _ => throw new InvalidOperationException(SR.GetResourceString(SR.ID0072)) + }; + } + + /// + /// Creates a new serial connection to the specified serial port. + /// + /// The serial port. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous + /// operation and whose result returns the created serial connection. + /// + public static async ValueTask CreateSerialConnectionAsync(SerialPort port, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(port); + + if (port.IsOpen) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0105)); + } + + var connection = new SerialConnection(new SerialPort(port.PortName, port.BaudRate, port.Parity, port.DataBits, port.StopBits)); + await connection.InitializeAsync(cancellationToken); + return connection; + } + + /// + /// Creates a new TCP connection to the specified Internet Protocol endpoint. + /// + /// The Internet Protocol endpoint. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous + /// operation and whose result returns the created TCP connection. + /// + public static async ValueTask CreateTcpConnectionAsync(IPEndPoint endpoint, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + var socket = new Socket(SocketType.Stream, ProtocolType.Tcp) + { + NoDelay = true + }; + + socket.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.KeepAlive, true); + socket.SetSocketOption(SocketOptionLevel.Tcp, SocketOptionName.TcpKeepAliveInterval, 1); + socket.SetSocketOption(SocketOptionLevel.Tcp, SocketOptionName.TcpKeepAliveTime, 2); + socket.SetSocketOption(SocketOptionLevel.Tcp, SocketOptionName.TcpKeepAliveRetryCount, 2); + + var connection = new SocketConnection(endpoint, socket); + await connection.InitializeAsync(cancellationToken); + return connection; + } + + /// + /// Represents a serial connection. 
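+ /// Note (descriptive, derived from the factory above): instances are created by CreateSerialConnectionAsync,
+ /// which opens a copy of the supplied serial port and wraps its base stream in an OpenNettyPipe.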
+ /// + private sealed class SerialConnection : OpenNettyConnection + { + private OpenNettyPipe? _pipe; + private SerialPort? _port; + + /// + /// Creates a new instance of the class. + /// + /// The serial port. + public SerialConnection(SerialPort port) + { + ArgumentNullException.ThrowIfNull(port); + + _port = port; + } + + /// + public override OpenNettyConnectionType Type => OpenNettyConnectionType.Serial; + + /// + public override ValueTask DisposeAsync() + { + if (Interlocked.Exchange(ref _pipe, null) is OpenNettyPipe pipe) + { + pipe.Dispose(); + } + + if (Interlocked.Exchange(ref _port, null) is SerialPort port) + { + port.Close(); + port.Dispose(); + } + + return ValueTask.CompletedTask; + } + + /// + public override async ValueTask ReceiveAsync(CancellationToken cancellationToken) + { + if (_pipe is not OpenNettyPipe pipe) + { + throw new ObjectDisposedException(SR.GetResourceString(SR.ID0007)); + } + + return await pipe.ReadAsync(cancellationToken); + } + + /// + public override async ValueTask SendAsync(OpenNettyFrame frame, CancellationToken cancellationToken) + { + if (_pipe is not OpenNettyPipe pipe) + { + throw new ObjectDisposedException(SR.GetResourceString(SR.ID0007)); + } + + await pipe.WriteAsync(frame, cancellationToken); + } + + /// + protected internal override async ValueTask InitializeAsync(CancellationToken cancellationToken) + { + if (_port is not SerialPort port) + { + throw new ObjectDisposedException(SR.GetResourceString(SR.ID0007)); + } + + if (port.IsOpen) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0008)); + } + + await Task.Yield(); + + port.Open(); + port.DiscardInBuffer(); + port.DiscardOutBuffer(); + + _pipe = OpenNettyPipe.Create(port.BaseStream); + } + } + + /// + /// Represents a socket connection. + /// + private sealed class SocketConnection : OpenNettyConnection + { + private readonly EndPoint _endpoint; + private OpenNettyPipe? _pipe; + private Socket? _socket; + + /// + /// Creates a new instance of the class. + /// + /// The endpoint. + /// The socket. 
+ public SocketConnection(EndPoint endpoint, Socket socket) + { + ArgumentNullException.ThrowIfNull(endpoint); + ArgumentNullException.ThrowIfNull(socket); + + _endpoint = endpoint; + _socket = socket; + } + + /// + public override OpenNettyConnectionType Type + { + get + { + if (_socket is not Socket socket) + { + throw new ObjectDisposedException(SR.GetResourceString(SR.ID0007)); + } + + return socket.ProtocolType switch + { + ProtocolType.Tcp => OpenNettyConnectionType.Tcp, + ProtocolType type => throw new InvalidOperationException(SR.FormatID0108(Enum.GetName(type))) + }; + } + } + + /// + public override async ValueTask DisposeAsync() + { + if (Interlocked.Exchange(ref _pipe, null) is OpenNettyPipe pipe) + { + pipe.Dispose(); + } + + if (Interlocked.Exchange(ref _socket, null) is Socket socket) + { + await socket.DisconnectAsync(reuseSocket: false); + socket.Dispose(); + } + } + + /// + public override async ValueTask ReceiveAsync(CancellationToken cancellationToken) + { + if (_pipe is not OpenNettyPipe pipe) + { + throw new ObjectDisposedException(SR.GetResourceString(SR.ID0007)); + } + + return await pipe.ReadAsync(cancellationToken); + } + + /// + public override async ValueTask SendAsync(OpenNettyFrame frame, CancellationToken cancellationToken) + { + if (_pipe is not OpenNettyPipe pipe) + { + throw new ObjectDisposedException(SR.GetResourceString(SR.ID0007)); + } + + await pipe.WriteAsync(frame, cancellationToken); + } + + /// + protected internal override async ValueTask InitializeAsync(CancellationToken cancellationToken) + { + if (_socket is not Socket socket) + { + throw new ObjectDisposedException(SR.GetResourceString(SR.ID0007)); + } + + if (socket.Connected) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0008)); + } + + await _socket.ConnectAsync(_endpoint, cancellationToken); + + _pipe = OpenNettyPipe.Create(new NetworkStream(_socket, ownsSocket: false)); + } + } +} diff --git a/src/OpenNetty/OpenNettyConnectionType.cs b/src/OpenNetty/OpenNettyConnectionType.cs new file mode 100644 index 0000000..810048c --- /dev/null +++ b/src/OpenNetty/OpenNettyConnectionType.cs @@ -0,0 +1,17 @@ +namespace OpenNetty; + +/// +/// Represents the type of an OpenNetty connection. +/// +public enum OpenNettyConnectionType +{ + /// + /// The connection uses a serial port. + /// + Serial = 0, + + /// + /// The connection uses a TCP socket. + /// + Tcp = 1 +} diff --git a/src/OpenNetty/OpenNettyConstants.cs b/src/OpenNetty/OpenNettyConstants.cs new file mode 100644 index 0000000..c477d01 --- /dev/null +++ b/src/OpenNetty/OpenNettyConstants.cs @@ -0,0 +1,39 @@ +namespace OpenNetty; + +/// +/// Exposes common OpenNetty constants. +/// +public static class OpenNettyConstants +{ + /// + /// Delimiters. + /// + public static class Delimiters + { + /// + /// End. + /// + public static ReadOnlySpan End => "##"u8; + + /// + /// Start. + /// + public static ReadOnlySpan Start => "*"u8; + } + + /// + /// Separators. + /// + public static class Separators + { + /// + /// Asterisk. + /// + public static ReadOnlySpan Asterisk => "*"u8; + + /// + /// Hash. 
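+ /// For illustration (a hypothetical frame, not one defined in this patch): in *1*1*15##, fields are
+ /// introduced by the start delimiter, separated by asterisks and terminated by the end delimiter,
+ /// while the hash separator is used for parameterized values such as 1#4#01.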
+ /// + public static ReadOnlySpan Hash => "#"u8; + } +} diff --git a/src/OpenNetty/OpenNettyController.cs b/src/OpenNetty/OpenNettyController.cs new file mode 100644 index 0000000..25fc7b6 --- /dev/null +++ b/src/OpenNetty/OpenNettyController.cs @@ -0,0 +1,1449 @@ +using System.Collections.Immutable; +using System.Globalization; +using System.Reactive.Linq; + +namespace OpenNetty; + +/// +/// Represents a high-level service that can be used to execute common OpenWebNet operations. +/// +public class OpenNettyController +{ + private readonly IOpenNettyService _service; + + /// + /// Creates a new instance of the class. + /// + /// The OpenNetty service. + public OpenNettyController(IOpenNettyService service) + => _service = service ?? throw new ArgumentNullException(nameof(service)); + + /// + /// Adds a new entry to the memory of the specified endpoint. + /// + /// The endpoint. + /// The data to add. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + public virtual ValueTask AddMemoryDataAsync( + OpenNettyEndpoint endpoint, + OpenNettyModels.Diagnostics.MemoryData data, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + ArgumentNullException.ThrowIfNull(data); + + if (!endpoint.HasCapability(OpenNettyCapabilities.MemoryWriting)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + return _service.SetDimensionAsync( + protocol : endpoint.Protocol, + dimension : OpenNettyDimensions.Diagnostics.MemoryWrite, + values : + [ + data.Media switch + { + OpenNettyMedia.Radio => "64", + OpenNettyMedia.Powerline => "96", + OpenNettyMedia.Infrared => "128", + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }, + data.Address.ToString(), + data.FunctionCode.ToString(CultureInfo.InvariantCulture) + ], + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Unicast, + gateway : endpoint.Gateway, + options : endpoint.GetBooleanSetting(OpenNettySettings.ActionValidation) is not false ? + OpenNettyTransmissionOptions.RequireActionValidation : + OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + /// + /// Adds the specified endpoint to the list of devices associated with the Zigbee scenario that is currently open. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + public virtual ValueTask BindAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.ZigbeeBinding)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + return _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.Scenario.BindingRequest, + address : endpoint.Address, + media : endpoint.Media, + mode : null, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + /// + /// Disables the supervisor mode for the specified endpoint. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. 
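+    /// <example>
+    /// A minimal usage sketch, assuming <c>controller</c> is an <see cref="OpenNettyController"/> provided
+    /// by the host application and <c>endpoint</c> is a Zigbee endpoint exposing the
+    /// <see cref="OpenNettyCapabilities.ZigbeeSupervision"/> capability:
+    /// <code>
+    /// // Hypothetical names: "controller" and "endpoint" are resolved by the host application.
+    /// await controller.DisableSupervisionAsync(endpoint);
+    /// </code>
+    /// </example>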
+ public virtual ValueTask DisableSupervisionAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.ZigbeeSupervision)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + return _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.Management.SupervisorRemove, + address : endpoint.Address, + media : endpoint.Media, + mode : null, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + /// + /// Enables the supervisor mode for the specified endpoint. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + public virtual ValueTask EnableSupervisionAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.ZigbeeSupervision)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + return _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.Management.Supervisor, + address : endpoint.Address, + media : endpoint.Media, + mode : null, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + /// + /// Cancels the pilot wire derogation mode currently enforced by the specified Nitoo gateway endpoint. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + public virtual ValueTask CancelPilotWireDerogationModeAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.PilotWireHeating)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + return _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.TemperatureControl.CancelWirePilotDerogationMode, + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Multicast, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + /// + /// Dispatches a virtual basic (action) scenario for the specified endpoint. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. 
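+    /// <example>
+    /// A possible usage sketch, assuming <c>controller</c> and a scenario <c>endpoint</c> exposing the
+    /// <see cref="OpenNettyCapabilities.BasicScenario"/> capability are resolved elsewhere:
+    /// <code>
+    /// // "controller" and "endpoint" are assumed to be provided by the caller.
+    /// await controller.DispatchBasicScenarioAsync(endpoint);
+    /// </code>
+    /// </example>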
+ public virtual async ValueTask DispatchBasicScenarioAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.BasicScenario)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + await _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.Scenario.Action, + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Broadcast, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + + await Task.Delay(TimeSpan.FromSeconds(0.5), cancellationToken); + + await _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.Scenario.StopAction, + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Broadcast, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + /// + /// Dispatches a virtual ON/OFF scenario for the specified endpoint. + /// + /// The endpoint. + /// The state. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + public virtual ValueTask DispatchOnOffScenarioAsync( + OpenNettyEndpoint endpoint, + OpenNettyModels.Lighting.SwitchState state, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.OnOffScenario)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + return _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : state == OpenNettyModels.Lighting.SwitchState.On ? + OpenNettyCommands.Lighting.On : OpenNettyCommands.Lighting.Off, + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Broadcast, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + /// + /// Dispatches a virtual progressive scenario for the specified endpoint. + /// + /// The endpoint. + /// The scenario duration. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. 
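+    /// <example>
+    /// A minimal sketch, assuming <c>controller</c> and an <c>endpoint</c> exposing the
+    /// <see cref="OpenNettyCapabilities.ProgressiveScenario"/> capability are resolved elsewhere
+    /// and the associated devices should reach their target state over ten seconds:
+    /// <code>
+    /// // Illustrative only: "controller" and "endpoint" come from the host application.
+    /// await controller.DispatchProgressiveScenarioAsync(endpoint, duration: TimeSpan.FromSeconds(10));
+    /// </code>
+    /// </example>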
+ public virtual async ValueTask DispatchProgressiveScenarioAsync( + OpenNettyEndpoint endpoint, + TimeSpan duration, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.ProgressiveScenario)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + await _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.Scenario.ActionInTime.WithParameters( + /* TIME: */ ((long) (duration.TotalSeconds * 5 + .5)).ToString(CultureInfo.InvariantCulture)), + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Broadcast, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + + await Task.Delay(TimeSpan.FromSeconds(0.5), cancellationToken); + + await _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.Scenario.StopAction, + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Broadcast, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + /// + /// Dispatches a virtual timed scenario for the specified endpoint. + /// + /// The endpoint. + /// The duration after which associated devices will change their state. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + public virtual async ValueTask DispatchTimedScenarioAsync( + OpenNettyEndpoint endpoint, + TimeSpan duration, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.TimedScenario)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + await _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.Scenario.ActionForTime.WithParameters( + /* TIME: */ ((long) (duration.TotalSeconds * 5 + .5)).ToString(CultureInfo.InvariantCulture)), + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Broadcast, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + + await Task.Delay(TimeSpan.FromSeconds(0.5), cancellationToken); + + await _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.Scenario.StopAction, + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Broadcast, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + /// + /// Inform all the Nitoo devices that memory entries pointing to the specified endpoint should be deleted. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. 
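+    /// <example>
+    /// A minimal sketch, assuming <c>controller</c> and a Nitoo <c>endpoint</c> exposing the
+    /// <see cref="OpenNettyCapabilities.MemoryWriting"/> capability are resolved elsewhere:
+    /// <code>
+    /// // Hypothetical names resolved by the caller.
+    /// await controller.EraseAddressAsync(endpoint);
+    /// </code>
+    /// </example>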
+ public virtual ValueTask EraseAddressAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.MemoryWriting)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + return _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.Diagnostics.AddressErase, + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Broadcast, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + /// + /// Resolves the current brightness of the specified endpoint. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous + /// operation and whose result returns the current brightness of the specified endpoint. + /// + public virtual async ValueTask GetBrightnessAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + return endpoint.Protocol switch + { + OpenNettyProtocol.Nitoo + when endpoint.HasCapability(OpenNettyCapabilities.BasicDimmingState) || + endpoint.HasCapability(OpenNettyCapabilities.AdvancedDimmingState) + => await GetUnitDescriptionAsync(endpoint, cancellationToken) switch + { + { FunctionCode: 143, Values: [{ Length: > 0 } value, ..] } + => (ushort) Math.Round(decimal.Parse(value, CultureInfo.InvariantCulture)), + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }, + + OpenNettyProtocol.Scs or OpenNettyProtocol.Zigbee + when endpoint.HasCapability(OpenNettyCapabilities.AdvancedDimmingState) + => await _service.GetDimensionAsync( + protocol : endpoint.Protocol, + dimension : OpenNettyDimensions.Lighting.DimmerLevelSpeed, + address : endpoint.Address, + media : endpoint.Media, + mode : null, + filter : static dimension => ValueTask.FromResult( + dimension == OpenNettyDimensions.Lighting.DimmerLevelSpeed || + dimension == OpenNettyDimensions.Lighting.DimmerStatus), + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken) switch + { + [{ Length: > 0 } value, ..] 
=> (ushort) (ushort.Parse(value, CultureInfo.InvariantCulture) - 100), + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }, + + OpenNettyProtocol.Scs or OpenNettyProtocol.Zigbee + when endpoint.HasCapability(OpenNettyCapabilities.BasicDimmingState) + => await _service.GetStatusAsync( + protocol : endpoint.Protocol, + category : OpenNettyCategories.Lighting, + address : endpoint.Address, + media : endpoint.Media, + mode : null, + filter : static command => ValueTask.FromResult( + command == OpenNettyCommands.Lighting.Off || + command == OpenNettyCommands.Lighting.On || + command == OpenNettyCommands.Lighting.On20 || + command == OpenNettyCommands.Lighting.On30 || + command == OpenNettyCommands.Lighting.On40 || + command == OpenNettyCommands.Lighting.On50 || + command == OpenNettyCommands.Lighting.On60 || + command == OpenNettyCommands.Lighting.On70 || + command == OpenNettyCommands.Lighting.On80 || + command == OpenNettyCommands.Lighting.On90 || + command == OpenNettyCommands.Lighting.On100), + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken) switch + { + var command when command == OpenNettyCommands.Lighting.Off => 0, + var command when command == OpenNettyCommands.Lighting.On => 100, + var command when command == OpenNettyCommands.Lighting.On20 => 20, + var command when command == OpenNettyCommands.Lighting.On30 => 30, + var command when command == OpenNettyCommands.Lighting.On40 => 40, + var command when command == OpenNettyCommands.Lighting.On50 => 50, + var command when command == OpenNettyCommands.Lighting.On60 => 60, + var command when command == OpenNettyCommands.Lighting.On70 => 70, + var command when command == OpenNettyCommands.Lighting.On80 => 80, + var command when command == OpenNettyCommands.Lighting.On90 => 90, + var command when command == OpenNettyCommands.Lighting.On100 => 100, + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }, + + _ => throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)) + }; + } + + /// + /// Resolves the current date/time of the specified SCS gateway endpoint. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous operation + /// and whose result returns the current date/time of the specified SCS gateway endpoint. + /// + public virtual async ValueTask GetDateTimeAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.DateTime)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + var values = await _service.GetDimensionAsync( + protocol : endpoint.Protocol, + dimension : OpenNettyDimensions.Management.DateTime, + address : endpoint.Address, + media : endpoint.Media, + mode : null, + filter : null, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + + return new DateTimeOffset( + year : int.Parse(values[7], CultureInfo.InvariantCulture), + month : int.Parse(values[6], CultureInfo.InvariantCulture), + day : int.Parse(values[5], CultureInfo.InvariantCulture), + hour : int.Parse(values[0], CultureInfo.InvariantCulture), + minute: int.Parse(values[1], CultureInfo.InvariantCulture), + second: int.Parse(values[2], CultureInfo.InvariantCulture), + offset: values[3] switch + { + ['0', .. 
{ Length: > 0 } value] => +TimeSpan.FromHours(int.Parse(value, CultureInfo.InvariantCulture)), + ['1', .. { Length: > 0 } value] => -TimeSpan.FromHours(int.Parse(value, CultureInfo.InvariantCulture)), + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }); + } + + /// + /// Gets the device description of the specified Nitoo device endpoint. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous operation + /// and whose result returns the device description of the specified Nitoo device endpoint. + /// + public virtual async ValueTask GetDeviceDescriptionAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.DeviceDescription)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + var values = await _service.GetDimensionAsync( + protocol : endpoint.Protocol, + dimension : OpenNettyDimensions.Diagnostics.DeviceDescription, + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Unicast, + filter : null, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + + return OpenNettyModels.Diagnostics.DeviceDescription.CreateFromDeviceDescription(values); + } + + /// + /// Reads all the memory entries associated with the specified endpoint. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous operation + /// and whose result returns the memory entries associated with the specified endpoint. + /// + public virtual async ValueTask> GetMemoryDataAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.MemoryReading)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + // Note: while the memory content is requested using a BUS COMMAND, it is returned asynchronously by + // Nitoo devices using DIMENSION READ frames after the initial BUS COMMAND has been acknowledged. + var dimensions = _service.ObserveDimensionsAsync(OpenNettyProtocol.Nitoo, OpenNettyCategories.Diagnostics, endpoint.Gateway) + .Where(static arguments => arguments.Dimension == OpenNettyDimensions.Diagnostics.MemoryDepth || + arguments.Dimension == OpenNettyDimensions.Diagnostics.MemoryData || + arguments.Dimension == OpenNettyDimensions.Diagnostics.ExtendedMemoryData) + .Where(arguments => arguments.Address == endpoint.Address) + .Replay(); + + // Connect the observable just before sending the command to ensure + // the dimensions are not missed due to a race condition. 
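+        // Replay() (rather than Publish()) is used here so that any DIMENSION READ frame received
+        // after ConnectAsync() but before the downstream subscriptions below are set up is buffered
+        // and replayed instead of being lost.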
+ await using var connection = await dimensions.ConnectAsync(); + + await _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.Diagnostics.MemoryRead, + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Unicast, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + + var count = await dimensions + .FirstOrDefault(static arguments => arguments.Dimension == OpenNettyDimensions.Diagnostics.MemoryDepth) + .Select(static arguments => int.Parse(arguments.Values[0], CultureInfo.InvariantCulture)) + .Timeout(TimeSpan.FromSeconds(3)) + .RunAsync(cancellationToken); + + if (count is 0) + { + return []; + } + + return [.. await dimensions + .Where(static arguments => arguments.Dimension == OpenNettyDimensions.Diagnostics.MemoryData || + arguments.Dimension == OpenNettyDimensions.Diagnostics.ExtendedMemoryData) + .Take(count) + .Timeout(TimeSpan.FromSeconds(10)) + .ToAsyncEnumerable() + .OrderBy(static arguments => ushort.Parse(arguments.Values[3], CultureInfo.InvariantCulture)) + .Select(static arguments => OpenNettyModels.Diagnostics.MemoryData.CreateFromUnitDescription(arguments.Values)) + .ToListAsync(cancellationToken)]; + } + + /// + /// Gets the number of memory entries associated with the specified endpoint. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous operation and whose + /// result returns the number of memory entries associated with the specified endpoint. + /// + public virtual async ValueTask GetMemoryDepthAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.MemoryReading)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + // Note: while the memory depth is requested using a BUS COMMAND, it is returned asynchronously by + // Nitoo devices using DIMENSION READ frames after the initial BUS COMMAND has been acknowledged. + var dimensions = _service.ObserveDimensionsAsync(OpenNettyProtocol.Nitoo, OpenNettyCategories.Diagnostics, endpoint.Gateway) + .Where(static arguments => arguments.Dimension == OpenNettyDimensions.Diagnostics.MemoryDepth) + .Where(arguments => arguments.Address == endpoint.Address) + .Replay(); + + // Connect the observable just before sending the command to ensure + // the dimensions are not missed due to a race condition. + await using var connection = await dimensions.ConnectAsync(); + + await _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.Diagnostics.MemoryRead, + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Unicast, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + + return await dimensions + .Select(static arguments => ushort.Parse(arguments.Values[0], CultureInfo.InvariantCulture)) + .FirstOrDefault() + .Timeout(TimeSpan.FromSeconds(3)) + .RunAsync(cancellationToken); + } + + /// + /// Gets the current pilot wire configuration of the specified endpoint. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous operation and + /// whose result returns the current pilot wire configuration of the specified endpoint. 
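+    /// <example>
+    /// A minimal sketch, assuming <c>controller</c> and a heating <c>endpoint</c> exposing the
+    /// <see cref="OpenNettyCapabilities.PilotWireHeating"/> capability are resolved elsewhere:
+    /// <code>
+    /// // "controller" and "endpoint" are assumed to be provided by the caller.
+    /// var configuration = await controller.GetPilotWireConfigurationAsync(endpoint);
+    /// </code>
+    /// </example>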
+ /// + public virtual async ValueTask GetPilotWireConfigurationAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.PilotWireHeating)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + var description = await GetUnitDescriptionAsync(endpoint, cancellationToken); + if (description is not { FunctionCode: 6, Values: [{ Length: > 0 }] values }) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + return OpenNettyModels.TemperatureControl.PilotWireConfiguration.CreateFromUnitDescription(values); + } + + /// + /// Gets the unit description of the specified endpoint. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous operation + /// and whose result returns the unit description of the specified endpoint. + /// + public virtual async ValueTask GetUnitDescriptionAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.UnitDescription)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + var values = await _service.GetDimensionAsync( + protocol : endpoint.Protocol, + dimension : OpenNettyDimensions.Diagnostics.UnitDescription, + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Unicast, + filter : null, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + + return OpenNettyModels.Diagnostics.UnitDescription.CreateFromUnitDescription(values); + } + + /// + /// Gets the current uptime of the specified SCS gateway endpoint. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous operation + /// and whose result returns the current uptime of the specified SCS gateway endpoint. + /// + public virtual async ValueTask GetUptimeAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.Uptime)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + var values = await _service.GetDimensionAsync( + protocol : endpoint.Protocol, + dimension : OpenNettyDimensions.Management.Uptime, + address : endpoint.Address, + media : endpoint.Media, + mode : null, + filter : null, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + + return new TimeSpan( + days : int.Parse(values[0], CultureInfo.InvariantCulture), + hours : int.Parse(values[1], CultureInfo.InvariantCulture), + minutes: int.Parse(values[2], CultureInfo.InvariantCulture), + seconds: int.Parse(values[3], CultureInfo.InvariantCulture)); + } + + /// + /// Gets the smart meter indexes contained in the memory of the specified endpoint. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous operation and whose + /// result returns the smart meter indexes contained in the memory of the specified endpoint. 
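+    /// <example>
+    /// A minimal sketch, assuming <c>controller</c> and an <c>endpoint</c> exposing the
+    /// <see cref="OpenNettyCapabilities.SmartMeterIndexes"/> capability are resolved elsewhere:
+    /// <code>
+    /// // Illustrative names provided by the caller.
+    /// var indexes = await controller.GetSmartMeterIndexesAsync(endpoint);
+    /// </code>
+    /// </example>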
+ /// + public virtual async ValueTask GetSmartMeterIndexesAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.SmartMeterIndexes)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + var values = await _service.GetDimensionAsync( + protocol : endpoint.Protocol, + dimension : OpenNettyDimensions.TemperatureControl.SmartMeterIndexes, + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Unicast, + filter : null, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + + return OpenNettyModels.TemperatureControl.SmartMeterIndexes.CreateFromDimensionValues(values); + } + + /// + /// Gets the smart meter information resolved from the specified endpoint. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous operation and + /// whose result returns the smart meter information resolved from the specified endpoint. + /// + public virtual async ValueTask GetSmartMeterInformationAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.SmartMeterInformation)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + var description = await GetUnitDescriptionAsync(endpoint, cancellationToken); + if (description is not { FunctionCode: 7, Values: [{ Length: > 0 }] values }) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + return OpenNettyModels.TemperatureControl.SmartMeterInformation.CreateFromUnitDescription(values); + } + + /// + /// Gets the current switch state of the specified endpoint. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous + /// operation and whose result returns the current switch state of the specified endpoint. + /// + public virtual async ValueTask GetSwitchStateAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitchState)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + return endpoint.Protocol switch + { + OpenNettyProtocol.Nitoo => await GetUnitDescriptionAsync(endpoint, cancellationToken) switch + { + { FunctionCode: 129, Values: [{ Length: > 0 } value] } => value is "128" or "129" or "130" ? + OpenNettyModels.Lighting.SwitchState.On : + OpenNettyModels.Lighting.SwitchState.Off, + + { FunctionCode: 143, Values: [{ Length: > 0 } value, ..] } => value is not "0" ? 
+ OpenNettyModels.Lighting.SwitchState.On : + OpenNettyModels.Lighting.SwitchState.Off, + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }, + + _ => await _service.GetStatusAsync( + protocol : endpoint.Protocol, + category : OpenNettyCategories.Lighting, + address : endpoint.Address, + media : endpoint.Media, + mode : null, + filter : static command => ValueTask.FromResult( + command == OpenNettyCommands.Lighting.Off || + command == OpenNettyCommands.Lighting.On || + command == OpenNettyCommands.Lighting.On20 || + command == OpenNettyCommands.Lighting.On30 || + command == OpenNettyCommands.Lighting.On40 || + command == OpenNettyCommands.Lighting.On50 || + command == OpenNettyCommands.Lighting.On60 || + command == OpenNettyCommands.Lighting.On70 || + command == OpenNettyCommands.Lighting.On80 || + command == OpenNettyCommands.Lighting.On90 || + command == OpenNettyCommands.Lighting.On100), + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken) != OpenNettyCommands.Lighting.Off ? + OpenNettyModels.Lighting.SwitchState.On : + OpenNettyModels.Lighting.SwitchState.Off + }; + } + + /// + /// Gets the current water heater state of the specified endpoint. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous operation and + /// whose result returns the current water heater state of the specified endpoint. + /// + public virtual async ValueTask GetWaterHeaterStateAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.WaterHeating)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + var description = await GetUnitDescriptionAsync(endpoint, cancellationToken); + if (description is not { FunctionCode: 133, Values: [{ Length: > 0 }] values }) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + return values[0] switch + { + "0" or "32" => OpenNettyModels.TemperatureControl.WaterHeaterState.Idle, + "1" or "17" or "33" => OpenNettyModels.TemperatureControl.WaterHeaterState.Heating, + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }; + } + + /// + /// Clear all the memory entries associated with the specified endpoint. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + public virtual ValueTask ResetMemoryDataAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.MemoryWriting)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + return _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.Diagnostics.MemoryReset, + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Unicast, + gateway : endpoint.Gateway, + options : endpoint.GetBooleanSetting(OpenNettySettings.ActionValidation) is not false ? + OpenNettyTransmissionOptions.RequireActionValidation : + OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + /// + /// Sets the brightness of the specified endpoint. + /// + /// The endpoint. + /// The brightness level, from 0 to 100. + /// The optional transition duration. 
+ /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + public virtual ValueTask SetBrightnessAsync( + OpenNettyEndpoint endpoint, + ushort level, + TimeSpan? duration = null, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (level is > 100) + { + throw new ArgumentOutOfRangeException(nameof(level)); + } + + // Note: for unknown reasons, specifying a SPEED parameter that is less than 10 (2 seconds * 5) + // results in an immediate - rather than progressive - brightness change on Nitoo devices when + // the requested level is higher than 50%. To discourage users of this API to set values that + // may exhibit this issue, a sanity check is performed here to require an adequate duration. + if (endpoint.Protocol is OpenNettyProtocol.Nitoo && level is > 50 && + duration is not null && duration < TimeSpan.FromSeconds(2)) + { + throw new ArgumentOutOfRangeException(nameof(duration)); + } + + if (endpoint.Protocol is OpenNettyProtocol.Scs or OpenNettyProtocol.Zigbee && + duration is not null && duration > TimeSpan.FromSeconds(50)) + { + throw new ArgumentOutOfRangeException(nameof(duration)); + } + + // Note: Nitoo devices support a very long duration, but to encourage users of this API + // to use reasonable values, the maximum duration allowed is currently set to 5 minutes. + if (endpoint.Protocol is OpenNettyProtocol.Nitoo && + duration is not null && duration > TimeSpan.FromMinutes(5)) + { + throw new ArgumentOutOfRangeException(nameof(duration)); + } + + if (endpoint.HasCapability(OpenNettyCapabilities.AdvancedDimmingControl)) + { + // Note: Zigbee/SCS gateways generally don't treat DimmerLevel100=100 (with 100 meaning "off") as + // an equivalent of the OFF BUS COMMAND in DIMENSION SET requests. To ensure the light is turned + // off when the brightness is set to 0, an OFF BUS COMMAND is used for Zigbee/SCS gateways. + if (level is 0 && endpoint.Protocol is OpenNettyProtocol.Scs or OpenNettyProtocol.Zigbee) + { + return _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.Lighting.Off, + address : endpoint.Address, + media : endpoint.Media, + mode : null, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + // Note: while Zigbee/SCS gateways use 101-200 as the brightness range, the Nitoo gateway uses 0-100. + else if (endpoint.Protocol is OpenNettyProtocol.Nitoo) + { + return _service.SetDimensionAsync( + protocol : endpoint.Protocol, + dimension : OpenNettyDimensions.Lighting.DimmerLevelSpeed, + values : [ + level.ToString(CultureInfo.InvariantCulture), + duration is not null ? + ((long) ((duration ?? TimeSpan.FromSeconds(2)).TotalSeconds * 5 + .5)).ToString(CultureInfo.InvariantCulture) : + "0" + ], + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Unicast, + gateway : endpoint.Gateway, + options : endpoint.GetBooleanSetting(OpenNettySettings.ActionValidation) is not false ? + OpenNettyTransmissionOptions.RequireActionValidation : + OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + return _service.SetDimensionAsync( + protocol : endpoint.Protocol, + dimension : OpenNettyDimensions.Lighting.DimmerLevelSpeed, + values : [ + (level + 100).ToString(CultureInfo.InvariantCulture), + duration is not null ? + ((long) ((duration ?? 
TimeSpan.FromSeconds(2)).TotalSeconds * 5 + .5)).ToString(CultureInfo.InvariantCulture) : + "0" + ], + address : endpoint.Address, + media : endpoint.Media, + mode : null, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + else if (endpoint.HasCapability(OpenNettyCapabilities.BasicDimmingControl)) + { + return _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : level switch + { + 0 => OpenNettyCommands.Lighting.Off, + >= 1 and <= 29 => OpenNettyCommands.Lighting.On20, + >= 30 and <= 39 => OpenNettyCommands.Lighting.On30, + >= 40 and <= 49 => OpenNettyCommands.Lighting.On40, + >= 50 and <= 59 => OpenNettyCommands.Lighting.On50, + >= 60 and <= 69 => OpenNettyCommands.Lighting.On60, + >= 70 and <= 79 => OpenNettyCommands.Lighting.On70, + >= 80 and <= 89 => OpenNettyCommands.Lighting.On80, + >= 90 and <= 99 => OpenNettyCommands.Lighting.On90, + _ => OpenNettyCommands.Lighting.On100 + }, + address : endpoint.Address, + media : endpoint.Media, + mode : endpoint.Protocol is OpenNettyProtocol.Nitoo ? OpenNettyMode.Unicast : null, + gateway : endpoint.Gateway, + options : endpoint.Protocol is OpenNettyProtocol.Nitoo && + endpoint.GetBooleanSetting(OpenNettySettings.ActionValidation) is not false ? + OpenNettyTransmissionOptions.RequireActionValidation : + OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + /// + /// Sets the date/time of the specified SCS gateway endpoint. + /// + /// The endpoint. + /// The date/time. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + public virtual ValueTask SetDateTimeAsync( + OpenNettyEndpoint endpoint, + DateTimeOffset date, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.DateTime)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + return _service.SetDimensionAsync( + protocol : endpoint.Protocol, + dimension : OpenNettyDimensions.Management.DateTime, + values : + [ + date.Hour.ToString("00", CultureInfo.InvariantCulture), + date.Minute.ToString("00", CultureInfo.InvariantCulture), + date.Second.ToString("00", CultureInfo.InvariantCulture), + date.Offset switch + { + TimeSpan offset when offset > TimeSpan.Zero => "0" + offset.TotalHours.ToString("00", CultureInfo.InvariantCulture), + TimeSpan offset when offset == TimeSpan.Zero => "000", + TimeSpan offset when offset < TimeSpan.Zero => "1" + offset.TotalHours.ToString("00", CultureInfo.InvariantCulture), + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }, + ((int) date.DayOfWeek).ToString("00", CultureInfo.InvariantCulture), + date.Day.ToString("00", CultureInfo.InvariantCulture), + date.Month.ToString("00", CultureInfo.InvariantCulture), + date.Year.ToString("0000", CultureInfo.InvariantCulture), + ], + address : endpoint.Address, + media : endpoint.Media, + mode : null, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + /// + /// Sets the pilot wire degoration mode that will be enforced by the specified endpoint. + /// + /// The endpoint. + /// The derogation mode. + /// The derogation duration. + /// The that can be used to abort the operation. 
+ /// A that can be used to monitor the asynchronous operation. + public virtual ValueTask SetPilotWireDerogationModeAsync( + OpenNettyEndpoint endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode mode, + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration duration, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!Enum.IsDefined(mode)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + if (!Enum.IsDefined(duration)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + if (!endpoint.HasCapability(OpenNettyCapabilities.PilotWireHeating)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + var value = mode switch + { + OpenNettyModels.TemperatureControl.PilotWireMode.Comfort => 0, + OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusOne => 1, + OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusTwo => 2, + OpenNettyModels.TemperatureControl.PilotWireMode.Eco => 3, + OpenNettyModels.TemperatureControl.PilotWireMode.FrostProtection => 4, + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }; + + if (duration is OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.FourHours) + { + value += 32; + } + + else if (duration is OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.EightHours) + { + value += 128; + } + + return _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.TemperatureControl.WirePilotDerogationMode.WithParameters( + /* MODE: */ value.ToString(CultureInfo.InvariantCulture)), + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Multicast, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + /// + /// Sets the pilot wire setpoint mode that will be applied by the specified endpoint. + /// + /// The endpoint. + /// The setpoint mode. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. 
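+    /// <example>
+    /// A minimal sketch, assuming <c>controller</c> and a pilot-wire-capable <c>endpoint</c> are
+    /// resolved elsewhere, that applies the "Eco" setpoint mode:
+    /// <code>
+    /// // "controller" and "endpoint" are assumed to be provided by the caller.
+    /// await controller.SetPilotWireSetpointModeAsync(endpoint,
+    ///     OpenNettyModels.TemperatureControl.PilotWireMode.Eco);
+    /// </code>
+    /// </example>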
+ public virtual ValueTask SetPilotWireSetpointModeAsync( + OpenNettyEndpoint endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode mode, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!Enum.IsDefined(mode)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + if (!endpoint.HasCapability(OpenNettyCapabilities.PilotWireHeating)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + var value = mode switch + { + OpenNettyModels.TemperatureControl.PilotWireMode.Comfort => 0, + OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusOne => 1, + OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusTwo => 2, + OpenNettyModels.TemperatureControl.PilotWireMode.Eco => 3, + OpenNettyModels.TemperatureControl.PilotWireMode.FrostProtection => 4, + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }; + + return _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.TemperatureControl.WirePilotSetpointMode.WithParameters( + /* MODE: */ value.ToString(CultureInfo.InvariantCulture)), + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Multicast, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + /// + /// Sets the water heater setpoint mode that will be applied by the specified endpoint. + /// + /// The endpoint. + /// The water heater mode. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + public virtual ValueTask SetWaterHeaterSetpointModeAsync( + OpenNettyEndpoint endpoint, + OpenNettyModels.TemperatureControl.WaterHeaterMode mode, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!Enum.IsDefined(mode)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + if (!endpoint.HasCapability(OpenNettyCapabilities.WaterHeating)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + return _service.SetDimensionAsync( + protocol : endpoint.Protocol, + dimension : OpenNettyDimensions.TemperatureControl.WaterHeatingMode, + values : [mode switch + { + OpenNettyModels.TemperatureControl.WaterHeaterMode.ForcedOff => "0", + OpenNettyModels.TemperatureControl.WaterHeaterMode.ForcedOn => "1", + OpenNettyModels.TemperatureControl.WaterHeaterMode.Automatic => "2", + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }], + address : endpoint.Address, + media : endpoint.Media, + mode : OpenNettyMode.Unicast, + gateway : endpoint.Gateway, + options : endpoint.GetBooleanSetting(OpenNettySettings.ActionValidation) is not false ? + OpenNettyTransmissionOptions.RequireActionValidation : + OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + /// + /// Switches the specified endpoint off. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. 
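+    /// <example>
+    /// A minimal sketch, assuming <c>controller</c> and an <c>endpoint</c> exposing both the
+    /// <see cref="OpenNettyCapabilities.OnOffSwitching"/> and
+    /// <see cref="OpenNettyCapabilities.OnOffSwitchState"/> capabilities are resolved elsewhere:
+    /// <code>
+    /// // Hypothetical names: "controller" and "endpoint" come from the host application.
+    /// if (await controller.GetSwitchStateAsync(endpoint) is OpenNettyModels.Lighting.SwitchState.On)
+    /// {
+    ///     await controller.SwitchOffAsync(endpoint);
+    /// }
+    /// </code>
+    /// </example>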
+ public virtual ValueTask SwitchOffAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitching)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + if (string.Equals(endpoint.GetStringSetting(OpenNettySettings.SwitchMode), + "Push button", StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + return _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.Lighting.Off, + address : endpoint.Address, + media : endpoint.Media, + mode : null, + gateway : endpoint.Gateway, + options : endpoint.Protocol is OpenNettyProtocol.Nitoo && + endpoint.GetBooleanSetting(OpenNettySettings.ActionValidation) is not false ? + OpenNettyTransmissionOptions.RequireActionValidation : + OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + /// + /// Switches the specified endpoint on. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + public virtual ValueTask SwitchOnAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitching)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + var options = OpenNettyTransmissionOptions.None; + + if (endpoint.Protocol is OpenNettyProtocol.Nitoo && + endpoint.GetBooleanSetting(OpenNettySettings.ActionValidation) is not false) + { + options |= OpenNettyTransmissionOptions.RequireActionValidation; + } + + // If the endpoint was configured to use the push-button mode, always disable retransmissions + // as ON commands are not idempotent when using this mode, which may result in unwanted results. + if (string.Equals(endpoint.GetStringSetting(OpenNettySettings.SwitchMode), + "Push button", StringComparison.OrdinalIgnoreCase)) + { + options |= OpenNettyTransmissionOptions.DisallowRetransmissions; + } + + return _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.Lighting.On, + address : endpoint.Address, + media : endpoint.Media, + mode : null, + gateway : endpoint.Gateway, + options : options, + cancellationToken: cancellationToken); + } + + /// + /// Toggles the state of the specified endpoint. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + public virtual async ValueTask ToggleAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitching)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + // Nitoo and SCS gateways don't natively support toggle BUS COMMANDS (unlike Zigbee + // gateways). To work around this limitation, the current status of the device/unit + // is retrieved first and an ON or OFF command is sent depending on the result. 
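+        // Note: because the current state is read first and the opposite command is sent afterwards,
+        // this emulated toggle is not atomic: a state change occurring between the two operations
+        // (for instance, triggered by a physical switch) can cause the final command to have no visible effect.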
+ if (endpoint.Protocol is OpenNettyProtocol.Nitoo or OpenNettyProtocol.Scs) + { + if (!endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitchState)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + if (string.Equals(endpoint.GetStringSetting(OpenNettySettings.SwitchMode), + "Push button", StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + var state = await GetSwitchStateAsync(endpoint, cancellationToken); + + await _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : state is OpenNettyModels.Lighting.SwitchState.On ? + OpenNettyCommands.Lighting.Off : + OpenNettyCommands.Lighting.On, + address : endpoint.Address, + media : endpoint.Media, + mode : endpoint.Protocol is OpenNettyProtocol.Nitoo ? OpenNettyMode.Unicast : null, + gateway : endpoint.Gateway, + options : endpoint.Protocol is OpenNettyProtocol.Nitoo && + endpoint.GetBooleanSetting(OpenNettySettings.ActionValidation) is not false ? + OpenNettyTransmissionOptions.RequireActionValidation : + OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + + else + { + await _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.Lighting.Toggle, + address : endpoint.Address, + media : endpoint.Media, + mode : null, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } + } + + /// + /// Removes the specified endpoint from the list of devices associated with the Zigbee scenario that is currently open. + /// + /// The endpoint. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + public virtual ValueTask UnbindAsync( + OpenNettyEndpoint endpoint, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(endpoint); + + if (!endpoint.HasCapability(OpenNettyCapabilities.ZigbeeBinding)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0076)); + } + + return _service.ExecuteCommandAsync( + protocol : endpoint.Protocol, + command : OpenNettyCommands.Scenario.UnbindingRequest, + address : endpoint.Address, + media : endpoint.Media, + mode : null, + gateway : endpoint.Gateway, + options : OpenNettyTransmissionOptions.None, + cancellationToken: cancellationToken); + } +} diff --git a/src/OpenNetty/OpenNettyCoordinator.cs b/src/OpenNetty/OpenNettyCoordinator.cs new file mode 100644 index 0000000..7fb629f --- /dev/null +++ b/src/OpenNetty/OpenNettyCoordinator.cs @@ -0,0 +1,1806 @@ +using System.Globalization; +using System.Reactive.Disposables; +using System.Reactive.Linq; +using static OpenNetty.OpenNettyEvents; + +namespace OpenNetty; + +/// +/// Contains the logic necessary to infer high-level events from incoming +/// and outgoing notifications dispatched by the OpenNetty pipeline. +/// +public sealed class OpenNettyCoordinator : IOpenNettyHandler +{ + private readonly OpenNettyController _controller; + private readonly OpenNettyEvents _events; + private readonly OpenNettyLogger _logger; + private readonly OpenNettyManager _manager; + private readonly IOpenNettyPipeline _pipeline; + + /// + /// Creates a new instance of the class. + /// + /// The OpenNetty controller. + /// The OpenNetty events. + /// The OpenNetty logger. + /// The OpenNetty manager. + /// The OpenNetty pipeline. 
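+    /// <remarks>
+    /// The coordinator reacts to the notifications observed on the injected pipeline
+    /// (via <see cref="IOpenNettyHandler.SubscribeAsync"/>) and publishes the inferred
+    /// high-level events through the <see cref="OpenNettyEvents"/> instance.
+    /// </remarks>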
+ public OpenNettyCoordinator( + OpenNettyController controller, + OpenNettyEvents events, + OpenNettyLogger logger, + OpenNettyManager manager, + IOpenNettyPipeline pipeline) + { + _controller = controller ?? throw new ArgumentNullException(nameof(controller)); + _events = events ?? throw new ArgumentNullException(nameof(events)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _manager = manager ?? throw new ArgumentNullException(nameof(manager)); + _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); + } + + /// + async ValueTask IOpenNettyHandler.SubscribeAsync() => StableCompositeAsyncDisposable.Create( + [ + // Note: this event handler is responsible for monitoring incoming and outgoing frames to detect state + // changes affecting - directly or indirectly (e.g via a Nitoo PnL scenario) - registered endpoints. + await _pipeline.SelectMany(static notification => notification switch + { + OpenNettyNotifications.MessageReceived { + Session.Type: OpenNettySessionType.Generic, + Message: { + Protocol: OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.BusCommand or + OpenNettyMessageType.DimensionRead or + OpenNettyMessageType.DimensionSet, + Address : not null, + Mode : OpenNettyMode.Broadcast } message } + => AsyncObservable.Return<(OpenNettyNotification Notification, OpenNettyMessage Message)>((notification, message)), + + OpenNettyNotifications.MessageReceived { + Session.Type: OpenNettySessionType.Event, + Message: { + Protocol: OpenNettyProtocol.Scs, + Type : OpenNettyMessageType.BusCommand or + OpenNettyMessageType.DimensionRead or + OpenNettyMessageType.DimensionSet, + Address : not null } message } + => AsyncObservable.Return<(OpenNettyNotification Notification, OpenNettyMessage Message)>((notification, message)), + + OpenNettyNotifications.MessageReceived { + Session.Type: OpenNettySessionType.Generic, + Message: { + Protocol: OpenNettyProtocol.Zigbee, + Type : OpenNettyMessageType.BusCommand or + OpenNettyMessageType.DimensionRead or + OpenNettyMessageType.DimensionSet, + Address : not null } message } + => AsyncObservable.Return<(OpenNettyNotification Notification, OpenNettyMessage Message)>((notification, message)), + + // Note: unlike SCS (and Zigbee devices when the supervision mode is enabled), Nitoo devices + // never report back state changes to the OpenWebNet gateway when the change originates from + // the gateway itself. To ensure events are correctly reported, the outgoing Nitoo BUS COMMAND + // and DIMENSION SET messages that have been acknowledged by the gateway (and optionally validated + // by the remote device using a special "VALID ACTION" BUS COMMAND message) are monitored here. + OpenNettyNotifications.MessageSent { + Session.Type: OpenNettySessionType.Generic, + Message: { + Protocol: OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.BusCommand or OpenNettyMessageType.DimensionSet, + Address : not null } message } + => AsyncObservable.Return<(OpenNettyNotification Notification, OpenNettyMessage Message)>((notification, message)), + + _ => AsyncObservable.Empty<(OpenNettyNotification Notification, OpenNettyMessage Message)>() + }) + .Do(async arguments => + { + // Important: to ensure the order of events is preserved, this RX event handler processes incoming and outgoing + // messages sequentially. As such, it is critical that this asynchronous method completes as quickly as possible. 
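+            // Where multiple endpoints are affected by a single frame, the cases below fan the
+            // per-endpoint work out (Task.Run, Parallel.ForEachAsync, Task.WhenAll) so that independent
+            // endpoints are processed concurrently rather than one after another.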
+ + switch (arguments) + { + // The switch state and the brightness level of an endpoint can be inferred from 6 types of messages: + // + // - For Nitoo devices, from an incoming or outgoing "OFF" or "ON" BUS COMMAND message. + // - For SCS/Zigbee devices, from an incoming "OFF" or "ON" - parameterized or not - BUS COMMAND message. + // - For SCS/Zigbee devices, from an incoming "ON%" BUS COMMAND message. + // - For SCS/Zigbee devices, from an incoming "DIMMER SPEED LEVEL" or "DIMMER STATUS" DIMENSION READ message. + // - For Nitoo devices, from an incoming "UNIT DESCRIPTION" DIMENSION READ message. + // - For Nitoo devices, from an outgoing "DIMMER SPEED LEVEL" DIMENSION SET message. + + case (OpenNettyNotification notification, + OpenNettyMessage { Protocol: OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : OpenNettyAddress address, + Mode : OpenNettyMode mode }) + when command == OpenNettyCommands.Lighting.Off || command == OpenNettyCommands.Lighting.On: + { + // Ignore the message if the corresponding endpoint couldn't be resolved or if it + // was received by a different gateway than the one associated with the endpoint. + var endpoint = await _manager.FindEndpointByAddressAsync(address); + if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway)) + { + return; + } + + // If the message was received and was emitted by the source using + // a broadcast transmission, it is considered as an ON/OFF scenario. + if (notification is OpenNettyNotifications.MessageReceived && mode is OpenNettyMode.Broadcast && + endpoint.HasCapability(OpenNettyCapabilities.OnOffScenario)) + { + await _events.PublishAsync(new OnOffScenarioReportedEventArgs(endpoint, command == OpenNettyCommands.Lighting.On ? + OpenNettyModels.Lighting.SwitchState.On : + OpenNettyModels.Lighting.SwitchState.Off)); + } + + List tasks = []; + + // Note: outgoing ON/OFF commands sent using broadcast or multicast transmission + // generally don't affect the local output of a Nitoo lighting device. As such, + // the switch state/brightness of the endpoint is only reported if the message was + // received (e.g by a different unit on the same device) or was sent in unicast. + if (mode is OpenNettyMode.Unicast || notification is OpenNettyNotifications.MessageReceived) + { + if (endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitchState) || + endpoint.HasCapability(OpenNettyCapabilities.BasicDimmingState) || + endpoint.HasCapability(OpenNettyCapabilities.AdvancedDimmingState)) + { + tasks.Add(ReportStateAsync(endpoint, CancellationToken.None).AsTask()); + } + + if (endpoint is { Protocol: OpenNettyProtocol.Nitoo, Unit.Definition.AssociatedUnitId: ushort unit }) + { + tasks.Add(Task.Run(async () => + { + var endpoint = await _manager.FindEndpointByAddressAsync(OpenNettyAddress.FromNitooAddress( + OpenNettyAddress.ToNitooAddress(address).Identifier, unit)); + + if (endpoint is null) + { + return; + } + + if (endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitchState) || + endpoint.HasCapability(OpenNettyCapabilities.BasicDimmingState) || + endpoint.HasCapability(OpenNettyCapabilities.AdvancedDimmingState)) + { + await ReportStateAsync(endpoint, CancellationToken.None); + } + })); + } + } + + if (mode is OpenNettyMode.Broadcast or OpenNettyMode.Multicast && endpoint is { Unit.Scenarios: [_, ..] 
scenarios }) + { + var endpoints = scenarios.ToAsyncEnumerable() + .Where(static scenario => scenario.FunctionCode is < 105) + .SelectAwait(scenario => _manager.FindEndpointByNameAsync(scenario.EndpointName)) + .Where(static endpoint => endpoint is { Protocol: OpenNettyProtocol.Nitoo, Unit: OpenNettyUnit }) + .OfType() + .Where(static endpoint => endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitchState) || + endpoint.HasCapability(OpenNettyCapabilities.BasicDimmingState) || + endpoint.HasCapability(OpenNettyCapabilities.AdvancedDimmingState)); + + tasks.Add(Parallel.ForEachAsync(endpoints, ReportStateAsync)); + } + + await Task.WhenAll(tasks); + + async ValueTask ReportStateAsync(OpenNettyEndpoint endpoint, CancellationToken cancellationToken) + { + if (endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitchState)) + { + if (command == OpenNettyCommands.Lighting.On) + { + await _events.PublishAsync(new SwitchStateReportedEventArgs(endpoint, + OpenNettyModels.Lighting.SwitchState.On), cancellationToken); + + // Note: if the endpoint was configured to use the push-button mode, dispatch an OFF state + // event immediately after switching it on (or receiving a notification indicating it was + // switched on), as Nitoo devices using this mode don't automatically report the OFF state. + if (string.Equals(endpoint.GetStringSetting(OpenNettySettings.SwitchMode), "Push button", StringComparison.OrdinalIgnoreCase)) + { + await _events.PublishAsync(new SwitchStateReportedEventArgs(endpoint, + OpenNettyModels.Lighting.SwitchState.Off), cancellationToken); + } + } + + else + { + await _events.PublishAsync(new SwitchStateReportedEventArgs(endpoint, + OpenNettyModels.Lighting.SwitchState.Off), cancellationToken); + } + } + + // Note: for Nitoo devices supporting dimming, an ON command always changes the brightness to 100%. + if (command == OpenNettyCommands.Lighting.On && (endpoint.HasCapability(OpenNettyCapabilities.BasicDimmingState) || + endpoint.HasCapability(OpenNettyCapabilities.AdvancedDimmingState))) + { + await _events.PublishAsync(new BrightnessReportedEventArgs(endpoint, 100), cancellationToken); + } + } + break; + } + + case (OpenNettyNotifications.MessageReceived, + OpenNettyMessage { Protocol: OpenNettyProtocol.Scs or OpenNettyProtocol.Zigbee, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : OpenNettyAddress address }) + // Note: ON/OFF BUS COMMAND frames can be parameterized. + when command.Category == OpenNettyCategories.Lighting && + (command.Value == OpenNettyCommands.Lighting.On.Value || + command.Value == OpenNettyCommands.Lighting.Off.Value): + { + await Parallel.ForEachAsync(_manager.FindEndpointsByAddressAsync(address), async (endpoint, cancellationToken) => + { + // Ignore the message if it was received by a different gateway than the one associated with the endpoint. + if (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway) + { + return; + } + + // SCS devices configured to use the PUL mode never react to area and general commands. + if (address.Type is OpenNettyAddressType.ScsLightPointArea or OpenNettyAddressType.ScsLightPointGeneral && + string.Equals(endpoint.GetStringSetting(OpenNettySettings.SwitchMode), "Push button", StringComparison.OrdinalIgnoreCase)) + { + return; + } + + if (endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitchState)) + { + await _events.PublishAsync(new SwitchStateReportedEventArgs(endpoint, + command.Value == OpenNettyCommands.Lighting.On.Value ? 
+ OpenNettyModels.Lighting.SwitchState.On : + OpenNettyModels.Lighting.SwitchState.Off), cancellationToken); + } + }); + break; + } + + case (OpenNettyNotifications.MessageReceived, + OpenNettyMessage { Protocol: OpenNettyProtocol.Scs or OpenNettyProtocol.Zigbee, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : OpenNettyAddress address }) + when command == OpenNettyCommands.Lighting.On20 || + command == OpenNettyCommands.Lighting.On30 || + command == OpenNettyCommands.Lighting.On40 || + command == OpenNettyCommands.Lighting.On50 || + command == OpenNettyCommands.Lighting.On60 || + command == OpenNettyCommands.Lighting.On70 || + command == OpenNettyCommands.Lighting.On80 || + command == OpenNettyCommands.Lighting.On90 || + command == OpenNettyCommands.Lighting.On100: + { + await Parallel.ForEachAsync(_manager.FindEndpointsByAddressAsync(address), async (endpoint, cancellationToken) => + { + // Ignore the message if it was received by a different gateway than the one associated with the endpoint. + if (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway) + { + return; + } + + // SCS devices configured to use the PUL mode never react to area and general commands. + if (address.Type is OpenNettyAddressType.ScsLightPointArea or OpenNettyAddressType.ScsLightPointGeneral && + string.Equals(endpoint.GetStringSetting(OpenNettySettings.SwitchMode), "Push button", StringComparison.OrdinalIgnoreCase)) + { + return; + } + + if (endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitchState)) + { + await _events.PublishAsync(new SwitchStateReportedEventArgs(endpoint, + OpenNettyModels.Lighting.SwitchState.On), cancellationToken); + } + + // Note: brightness level changes reported via ON% BUS COMMAND frames are deliberately + // ignored for endpoints that support advanced dimming, as this method often gives very + // imprecise results that are inconsistent with the brightness level retrieved using a + // "DIMMER LEVEL SPEED" or "DIMMER STATUS" DIMENSION REQUEST frame (e.g when setting the + // brightness to 30%, a F418U2 SCS dimmer correctly reports the "130" value when using + // a DIMENSION REQUEST but returns "5" (50%) when using a STATUS REQUEST. To avoid that, + // a specialized event handler is responsible for monitoring ON% BUS COMMAND frames and + // retrieving the exact brightness level using a "DIMMER LEVEL SPEED" DIMENSION REQUEST. + if (endpoint.HasCapability(OpenNettyCapabilities.BasicDimmingState) && + !endpoint.HasCapability(OpenNettyCapabilities.AdvancedDimmingState)) + { + await _events.PublishAsync(new BrightnessReportedEventArgs(endpoint, (ushort) + (command == OpenNettyCommands.Lighting.On20 ? 20 : + command == OpenNettyCommands.Lighting.On30 ? 30 : + command == OpenNettyCommands.Lighting.On40 ? 40 : + command == OpenNettyCommands.Lighting.On50 ? 50 : + command == OpenNettyCommands.Lighting.On60 ? 60 : + command == OpenNettyCommands.Lighting.On70 ? 70 : + command == OpenNettyCommands.Lighting.On80 ? 80 : + command == OpenNettyCommands.Lighting.On90 ? 90 : 100)), cancellationToken); + } + }); + break; + } + + case (OpenNettyNotifications.MessageReceived, + OpenNettyMessage { Protocol : OpenNettyProtocol.Scs or OpenNettyProtocol.Zigbee, + Type : OpenNettyMessageType.DimensionRead, + Address : OpenNettyAddress address, + Dimension: OpenNettyDimension dimension, + Values : [{ Length: > 0 } value, ..] 
}) + when dimension == OpenNettyDimensions.Lighting.DimmerLevelSpeed || + dimension == OpenNettyDimensions.Lighting.DimmerStatus: + { + await Parallel.ForEachAsync(_manager.FindEndpointsByAddressAsync(address), async (endpoint, cancellationToken) => + { + // Ignore the message if it was received by a different gateway than the one associated with the endpoint. + if (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway) + { + return; + } + + // SCS devices configured to use the PUL mode never react to area and general commands. + if (address.Type is OpenNettyAddressType.ScsLightPointArea or OpenNettyAddressType.ScsLightPointGeneral && + string.Equals(endpoint.GetStringSetting(OpenNettySettings.SwitchMode), "Push button", StringComparison.OrdinalIgnoreCase)) + { + return; + } + + var level = (ushort) (ushort.Parse(value, CultureInfo.InvariantCulture) - 100); + + if (endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitchState)) + { + await _events.PublishAsync(new SwitchStateReportedEventArgs(endpoint, level is not 0 ? + OpenNettyModels.Lighting.SwitchState.On : + OpenNettyModels.Lighting.SwitchState.Off), cancellationToken); + } + + // Note: the special brightness level "0" always indicates that the output is switched off. + // To avoid overriding the last known level (which is typically restored by SCS devices when + // receiving an ON command), the brightness level is only reported if it's higher than zero. + if (level is not 0 && (endpoint.HasCapability(OpenNettyCapabilities.BasicDimmingState) || + endpoint.HasCapability(OpenNettyCapabilities.AdvancedDimmingState))) + { + await _events.PublishAsync(new BrightnessReportedEventArgs(endpoint, level), cancellationToken); + } + }); + break; + } + + case (OpenNettyNotifications.MessageReceived, + OpenNettyMessage { Protocol : OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.DimensionRead, + Address : OpenNettyAddress address, + Dimension: OpenNettyDimension dimension, + Values : ["129", { Length: > 0 } value] }) + when dimension == OpenNettyDimensions.Diagnostics.UnitDescription: + { + // Ignore the message if the corresponding endpoint couldn't be resolved or if it + // was received by a different gateway than the one associated with the endpoint. + var endpoint = await _manager.FindEndpointByAddressAsync(address); + if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway)) + { + return; + } + + if (endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitchState)) + { + await _events.PublishAsync(new SwitchStateReportedEventArgs(endpoint, value is "128" or "129" or "130" ? + OpenNettyModels.Lighting.SwitchState.On : + OpenNettyModels.Lighting.SwitchState.Off)); + } + break; + } + + case (OpenNettyNotifications.MessageReceived, + OpenNettyMessage { Protocol : OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.DimensionRead, + Address : OpenNettyAddress address, + Dimension: OpenNettyDimension dimension, + Values : ["143", { Length: > 0 } value, ..] }) + when dimension == OpenNettyDimensions.Diagnostics.UnitDescription: + { + // Ignore the message if the corresponding endpoint couldn't be resolved or if it + // was received by a different gateway than the one associated with the endpoint. 
+ var endpoint = await _manager.FindEndpointByAddressAsync(address); + if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway)) + { + return; + } + + var level = ushort.Parse(value, CultureInfo.InvariantCulture); + + if (endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitchState)) + { + await _events.PublishAsync(new SwitchStateReportedEventArgs(endpoint, level is not 0 ? + OpenNettyModels.Lighting.SwitchState.On : + OpenNettyModels.Lighting.SwitchState.Off)); + } + + // Note: the special brightness level "0" always indicates that the output is switched off. + // While Nitoo devices normally don't restore the last known brightness level, the brightness + // level is only reported if it's higher than zero for consistency with MyHome/SCS devices. + if (level is not 0 && (endpoint.HasCapability(OpenNettyCapabilities.BasicDimmingState) || + endpoint.HasCapability(OpenNettyCapabilities.AdvancedDimmingState))) + { + await _events.PublishAsync(new BrightnessReportedEventArgs(endpoint, level)); + } + break; + } + + // Note: outgoing dimmer level commands sent using broadcast or multicast transmission + // generally don't affect the local output of a Nitoo lighting device. As such, the switch + // state/brightness of the endpoint is only reported if the message was sent in unicast. + case (OpenNettyNotifications.MessageReceived or OpenNettyNotifications.MessageSent, + OpenNettyMessage { Protocol : OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.DimensionSet, + Address : OpenNettyAddress address, + Mode : OpenNettyMode.Unicast, + Dimension: OpenNettyDimension dimension, + Values : [{ Length: > 0 } value, ..] }) + when dimension == OpenNettyDimensions.Lighting.DimmerLevelSpeed: + { + // Ignore the message if the corresponding endpoint couldn't be resolved or if it + // was received by a different gateway than the one associated with the endpoint. + var endpoint = await _manager.FindEndpointByAddressAsync(address); + if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway)) + { + return; + } + + var level = (ushort) Math.Round(decimal.Parse(value, CultureInfo.InvariantCulture), MidpointRounding.AwayFromZero); + + if (endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitchState)) + { + await _events.PublishAsync(new SwitchStateReportedEventArgs(endpoint, level is not 0 ? + OpenNettyModels.Lighting.SwitchState.On : + OpenNettyModels.Lighting.SwitchState.Off)); + } + + // Note: the special brightness level "0" always indicates that the output is switched off. + // While Nitoo devices normally don't restore the last known brightness level, the brightness + // level is only reported if it's higher than zero for consistency with MyHome/SCS devices. + if (level is not 0 && (endpoint.HasCapability(OpenNettyCapabilities.BasicDimmingState) || + endpoint.HasCapability(OpenNettyCapabilities.AdvancedDimmingState))) + { + await _events.PublishAsync(new BrightnessReportedEventArgs(endpoint, level)); + } + break; + } + + case (OpenNettyNotifications.MessageReceived, + OpenNettyMessage { Protocol: OpenNettyProtocol.Zigbee, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : OpenNettyAddress address }) + when command == OpenNettyCommands.Lighting.Toggle: + { + // Ignore the message if the corresponding endpoint couldn't be resolved or if it + // was received by a different gateway than the one associated with the endpoint. 
+ var endpoint = await _manager.FindEndpointByAddressAsync(address); + if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway)) + { + return; + } + + if (endpoint.HasCapability(OpenNettyCapabilities.ToggleScenario)) + { + await _events.PublishAsync(new ToggleScenarioReportedEventArgs(endpoint)); + } + break; + } + + case (OpenNettyNotifications.MessageReceived, + OpenNettyMessage { Protocol : OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.DimensionRead, + Address : OpenNettyAddress address, + Dimension: OpenNettyDimension dimension, + Values : [{ Length: > 0 }, { Length: > 0 }, { Length: > 0 }] values }) + when dimension == OpenNettyDimensions.TemperatureControl.SmartMeterIndexes: + { + // Ignore the message if the corresponding endpoint couldn't be resolved or if it + // was received by a different gateway than the one associated with the endpoint. + var endpoint = await _manager.FindEndpointByAddressAsync(address); + if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway)) + { + return; + } + + if (endpoint.HasCapability(OpenNettyCapabilities.SmartMeterIndexes)) + { + await _events.PublishAsync(new SmartMeterIndexesReportedEventArgs(endpoint, + OpenNettyModels.TemperatureControl.SmartMeterIndexes.CreateFromDimensionValues(values))); + } + break; + } + + case (OpenNettyNotifications.MessageReceived, + OpenNettyMessage { Protocol : OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.DimensionRead, + Address : OpenNettyAddress address, + Dimension: OpenNettyDimension dimension, + Values : [{ Length: > 0 } value] }) + when dimension == OpenNettyDimensions.TemperatureControl.SmartMeterRateType: + { + // Ignore the message if the corresponding endpoint couldn't be resolved or if it + // was received by a different gateway than the one associated with the endpoint. + var endpoint = await _manager.FindEndpointByAddressAsync(address); + if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway)) + { + return; + } + + if (endpoint.HasCapability(OpenNettyCapabilities.SmartMeterInformation)) + { + await _events.PublishAsync(new SmartMeterRateTypeReportedEventArgs(endpoint, value switch + { + "2" => OpenNettyModels.TemperatureControl.SmartMeterRateType.OffPeak, + "3" => OpenNettyModels.TemperatureControl.SmartMeterRateType.Peak, + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + })); + } + break; + } + + case (OpenNettyNotifications.MessageReceived, + OpenNettyMessage { Protocol : OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.DimensionRead, + Address : OpenNettyAddress address, + Dimension: OpenNettyDimension dimension, + Values : ["7", { Length: > 0 } value] }) + when dimension == OpenNettyDimensions.Diagnostics.UnitDescription: + { + // Ignore the message if the corresponding endpoint couldn't be resolved or if it + // was received by a different gateway than the one associated with the endpoint. 
+ var endpoint = await _manager.FindEndpointByAddressAsync(address);
+ if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway))
+ {
+ return;
+ }
+
+ if (endpoint.HasCapability(OpenNettyCapabilities.SmartMeterInformation))
+ {
+ await _events.PublishAsync(new SmartMeterRateTypeReportedEventArgs(endpoint, value switch
+ {
+ "32" or "33" => OpenNettyModels.TemperatureControl.SmartMeterRateType.OffPeak,
+ "48" or "49" => OpenNettyModels.TemperatureControl.SmartMeterRateType.Peak,
+
+ _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075))
+ }));
+
+ await _events.PublishAsync(new SmartMeterPowerCutModeReportedEventArgs(endpoint, value is "33" or "49"));
+ }
+ break;
+ }
+
+ case (OpenNettyNotifications.MessageReceived,
+ OpenNettyMessage { Protocol : OpenNettyProtocol.Nitoo,
+ Type : OpenNettyMessageType.DimensionRead,
+ Address : OpenNettyAddress address,
+ Dimension: OpenNettyDimension dimension,
+ Values : ["133", { Length: > 0 } value] })
+ when dimension == OpenNettyDimensions.Diagnostics.UnitDescription:
+ {
+ // Ignore the message if the corresponding endpoint couldn't be resolved or if it
+ // was received by a different gateway than the one associated with the endpoint.
+ var endpoint = await _manager.FindEndpointByAddressAsync(address);
+ if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway))
+ {
+ return;
+ }
+
+ if (endpoint.HasCapability(OpenNettyCapabilities.WaterHeating))
+ {
+ switch (value)
+ {
+ // Note: the value "0" is ambiguous and appears to be used to represent two cases:
+ //
+ // - When the user selected the "automatic" mode but hot water is not currently
+ // being produced (e.g because the off-peak signal was not yet received).
+ //
+ // - When no hot water is produced because the user selected the "forced off" mode.
+ //
+ // Since the value is ambiguous, it is not possible to reliably determine the actual
+ // water heating mode: in this case, the mode is not immediately reported and another
+ // event handler is responsible for reporting it when the off-peak signal is received.
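+ //
+ // The remaining values appear to map as follows: "32" indicates the automatic mode with an idle
+ // output, "1" and "33" indicate the automatic mode with an active output and "17" indicates the
+ // "forced on" mode, as reflected by the cases below.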
+ case "0": + await _events.PublishAsync(new WaterHeaterStateReportedEventArgs(endpoint, + OpenNettyModels.TemperatureControl.WaterHeaterState.Idle)); + break; + + case "32": + await _events.PublishAsync(new WaterHeaterSetpointModeReportedEventArgs(endpoint, + OpenNettyModels.TemperatureControl.WaterHeaterMode.Automatic)); + + await _events.PublishAsync(new WaterHeaterStateReportedEventArgs(endpoint, + OpenNettyModels.TemperatureControl.WaterHeaterState.Idle)); + break; + + case "1" or "33": + await _events.PublishAsync(new WaterHeaterSetpointModeReportedEventArgs(endpoint, + OpenNettyModels.TemperatureControl.WaterHeaterMode.Automatic)); + + await _events.PublishAsync(new WaterHeaterStateReportedEventArgs(endpoint, + OpenNettyModels.TemperatureControl.WaterHeaterState.Heating)); + break; + + case "17": + await _events.PublishAsync(new WaterHeaterSetpointModeReportedEventArgs(endpoint, + OpenNettyModels.TemperatureControl.WaterHeaterMode.ForcedOn)); + + await _events.PublishAsync(new WaterHeaterStateReportedEventArgs(endpoint, + OpenNettyModels.TemperatureControl.WaterHeaterState.Heating)); + break; + } + } + break; + } + + case (OpenNettyNotifications.MessageSent, + OpenNettyMessage { Protocol : OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.DimensionSet, + Address : OpenNettyAddress address, + Mode : OpenNettyMode mode, + Dimension: OpenNettyDimension dimension, + Values : [{ Length: > 0 } value, ..] }) + when dimension == OpenNettyDimensions.TemperatureControl.WaterHeatingMode: + { + // Ignore the message if the corresponding endpoint couldn't be resolved or if it + // was received by a different gateway than the one associated with the endpoint. + var endpoint = await _manager.FindEndpointByAddressAsync(address); + if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway)) + { + return; + } + + List tasks = []; + + if (endpoint.HasCapability(OpenNettyCapabilities.WaterHeating)) + { + tasks.Add(ReportSetpointModeAsync(endpoint, CancellationToken.None).AsTask()); + } + + if (endpoint is { Unit.Definition.AssociatedUnitId: ushort unit }) + { + tasks.Add(Task.Run(async () => + { + var endpoint = await _manager.FindEndpointByAddressAsync(OpenNettyAddress.FromNitooAddress( + OpenNettyAddress.ToNitooAddress(address).Identifier, unit)); + + if (endpoint is not null && endpoint.HasCapability(OpenNettyCapabilities.WaterHeating)) + { + await ReportSetpointModeAsync(endpoint, CancellationToken.None); + } + })); + } + + if (mode is OpenNettyMode.Broadcast or OpenNettyMode.Multicast && endpoint is { Unit.Scenarios: [_, ..] 
scenarios })
+ {
+ var endpoints = scenarios.ToAsyncEnumerable()
+ .Where(static scenario => scenario.FunctionCode is 255)
+ .SelectAwait(scenario => _manager.FindEndpointByNameAsync(scenario.EndpointName))
+ .Where(static endpoint => endpoint is { Protocol: OpenNettyProtocol.Nitoo, Unit: OpenNettyUnit })
+ .OfType()
+ .Where(static endpoint => endpoint.HasCapability(OpenNettyCapabilities.WaterHeating));
+
+ tasks.Add(Parallel.ForEachAsync(endpoints, ReportSetpointModeAsync));
+ }
+
+ await Task.WhenAll(tasks);
+
+ async ValueTask ReportSetpointModeAsync(OpenNettyEndpoint endpoint, CancellationToken cancellationToken) =>
+ await _events.PublishAsync(new WaterHeaterSetpointModeReportedEventArgs(endpoint, value switch
+ {
+ "0" => OpenNettyModels.TemperatureControl.WaterHeaterMode.ForcedOff,
+ "1" => OpenNettyModels.TemperatureControl.WaterHeaterMode.ForcedOn,
+ "2" => OpenNettyModels.TemperatureControl.WaterHeaterMode.Automatic,
+
+ _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075))
+ }), cancellationToken);
+ break;
+ }
+
+ // The partial state of a pilot wire device can be inferred from 4 types of incoming messages:
+ //
+ // - From a "UNIT DESCRIPTION" DIMENSION READ message.
+ // - From a parameterized "WIRE PILOT SETPOINT MODE" BUS COMMAND message.
+ // - From a parameterized "WIRE PILOT DEROGATION MODE" BUS COMMAND message.
+ // - From a "CANCEL WIRE PILOT DEROGATION" BUS COMMAND message.
+
+ case (OpenNettyNotifications.MessageReceived,
+ OpenNettyMessage { Protocol : OpenNettyProtocol.Nitoo,
+ Type : OpenNettyMessageType.DimensionRead,
+ Address : OpenNettyAddress address,
+ Dimension: OpenNettyDimension dimension,
+ Values : ["6", { Length: > 0 } value] })
+ when dimension == OpenNettyDimensions.Diagnostics.UnitDescription:
+ {
+ // Ignore the message if the corresponding endpoint couldn't be resolved or if it
+ // was received by a different gateway than the one associated with the endpoint.
+ var endpoint = await _manager.FindEndpointByAddressAsync(address);
+ if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway))
+ {
+ return;
+ }
+
+ if (endpoint.HasCapability(OpenNettyCapabilities.PilotWireHeating))
+ {
+ var configuration = OpenNettyModels.TemperatureControl.PilotWireConfiguration.CreateFromUnitDescription([value]);
+ if (configuration.IsDerogationActive)
+ {
+ await _events.PublishAsync(new PilotWireDerogationModeReportedEventArgs(endpoint, configuration.Mode, configuration.DerogationDuration));
+ }
+
+ else
+ {
+ await _events.PublishAsync(new PilotWireSetpointModeReportedEventArgs(endpoint, configuration.Mode));
+ await _events.PublishAsync(new PilotWireDerogationModeReportedEventArgs(endpoint, null, null));
+ }
+ }
+ break;
+ }
+
+ case (OpenNettyNotification notification,
+ OpenNettyMessage { Protocol : OpenNettyProtocol.Nitoo,
+ Type : OpenNettyMessageType.BusCommand,
+ Command : OpenNettyCommand command,
+ Address : OpenNettyAddress address,
+ Mode : OpenNettyMode mode })
+ when command.Category == OpenNettyCategories.TemperatureControl &&
+ command.Value == OpenNettyCommands.TemperatureControl.WirePilotSetpointMode.Value &&
+ command.Parameters is [{ Length: > 0 } value]:
+ {
+ // Ignore the message if the corresponding endpoint couldn't be resolved or if it
+ // was received by a different gateway than the one associated with the endpoint.
+ var endpoint = await _manager.FindEndpointByAddressAsync(address); + if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway)) + { + return; + } + + List tasks = []; + + if (endpoint.HasCapability(OpenNettyCapabilities.PilotWireHeating)) + { + tasks.Add(ReportDerogationAndSetpointModesAsync(endpoint, CancellationToken.None).AsTask()); + } + + if (endpoint is { Unit.Definition.AssociatedUnitId: ushort unit }) + { + tasks.Add(Task.Run(async () => + { + var endpoint = await _manager.FindEndpointByAddressAsync(OpenNettyAddress.FromNitooAddress( + OpenNettyAddress.ToNitooAddress(address).Identifier, unit)); + + if (endpoint is not null && endpoint.HasCapability(OpenNettyCapabilities.PilotWireHeating)) + { + await ReportDerogationAndSetpointModesAsync(endpoint, CancellationToken.None); + } + })); + } + + if (mode is OpenNettyMode.Broadcast or OpenNettyMode.Multicast && endpoint is { Unit.Scenarios: [_, ..] scenarios }) + { + var endpoints = scenarios.ToAsyncEnumerable() + .Where(static scenario => scenario.FunctionCode is 255) + .SelectAwait(scenario => _manager.FindEndpointByNameAsync(scenario.EndpointName)) + .Where(static endpoint => endpoint is { Protocol: OpenNettyProtocol.Nitoo, Unit: OpenNettyUnit }) + .OfType() + .Where(static endpoint => endpoint.HasCapability(OpenNettyCapabilities.PilotWireHeating)); + + tasks.Add(Parallel.ForEachAsync(endpoints, ReportDerogationAndSetpointModesAsync)); + } + + await Task.WhenAll(tasks); + + async ValueTask ReportDerogationAndSetpointModesAsync(OpenNettyEndpoint endpoint, CancellationToken cancellationToken) + { + await _events.PublishAsync(new PilotWireSetpointModeReportedEventArgs(endpoint, value switch + { + "0" => OpenNettyModels.TemperatureControl.PilotWireMode.Comfort, + "1" => OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusOne, + "2" => OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusTwo, + "3" => OpenNettyModels.TemperatureControl.PilotWireMode.Eco, + "4" => OpenNettyModels.TemperatureControl.PilotWireMode.FrostProtection, + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }), cancellationToken); + + // Note: setting the setpoint mode may not have an immediate effect on the device (e.g if a + // derogation mode was set with a minimal duration during which setpoint commands are ignored). + // As such, the derogation mode cannot be reported here, as it may still be active on the device. + if (notification is OpenNettyNotifications.MessageSent) + { + await _events.PublishAsync(new PilotWireDerogationModeReportedEventArgs(endpoint, null, null), cancellationToken); + } + } + break; + } + + case (OpenNettyNotifications.MessageReceived or OpenNettyNotifications.MessageSent, + OpenNettyMessage { Protocol : OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : OpenNettyAddress address, + Mode : OpenNettyMode mode }) + when command.Category == OpenNettyCategories.TemperatureControl && + command.Value == OpenNettyCommands.TemperatureControl.WirePilotDerogationMode.Value && + command.Parameters is [{ Length: > 0 } value]: + { + // Ignore the message if the corresponding endpoint couldn't be resolved or if it + // was received by a different gateway than the one associated with the endpoint. 
+ var endpoint = await _manager.FindEndpointByAddressAsync(address); + if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway)) + { + return; + } + + List tasks = []; + + if (endpoint.HasCapability(OpenNettyCapabilities.PilotWireHeating)) + { + tasks.Add(ReportDerogationModeAsync(endpoint, CancellationToken.None).AsTask()); + } + + if (endpoint is { Unit.Definition.AssociatedUnitId: ushort unit }) + { + tasks.Add(Task.Run(async () => + { + var endpoint = await _manager.FindEndpointByAddressAsync(OpenNettyAddress.FromNitooAddress( + OpenNettyAddress.ToNitooAddress(address).Identifier, unit)); + + if (endpoint is not null && endpoint.HasCapability(OpenNettyCapabilities.PilotWireHeating)) + { + await ReportDerogationModeAsync(endpoint, CancellationToken.None); + } + })); + } + + if (mode is OpenNettyMode.Broadcast or OpenNettyMode.Multicast && endpoint is { Unit.Scenarios: [_, ..] scenarios }) + { + var endpoints = scenarios.ToAsyncEnumerable() + .Where(static scenario => scenario.FunctionCode is 255) + .SelectAwait(scenario => _manager.FindEndpointByNameAsync(scenario.EndpointName)) + .Where(static endpoint => endpoint is { Protocol: OpenNettyProtocol.Nitoo, Unit: OpenNettyUnit }) + .OfType() + .Where(static endpoint => endpoint.HasCapability(OpenNettyCapabilities.PilotWireHeating)); + + tasks.Add(Parallel.ForEachAsync(endpoints, ReportDerogationModeAsync)); + } + + await Task.WhenAll(tasks); + + async ValueTask ReportDerogationModeAsync(OpenNettyEndpoint endpoint, CancellationToken cancellationToken) => + await _events.PublishAsync(new PilotWireDerogationModeReportedEventArgs(endpoint, + value switch + { + "0" or "32" or "128" => OpenNettyModels.TemperatureControl.PilotWireMode.Comfort, + "1" or "33" or "129" => OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusOne, + "2" or "34" or "130" => OpenNettyModels.TemperatureControl.PilotWireMode.ComfortMinusTwo, + "3" or "35" or "131" => OpenNettyModels.TemperatureControl.PilotWireMode.Eco, + "4" or "36" or "132" => OpenNettyModels.TemperatureControl.PilotWireMode.FrostProtection, + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }, + ushort.Parse(value, CultureInfo.InvariantCulture) switch + { + < 32 => OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.None, + >= 32 and < 128 => OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.FourHours, + >= 128 => OpenNettyModels.TemperatureControl.PilotWireDerogationDuration.EightHours + }), cancellationToken); + break; + } + + case (OpenNettyNotifications.MessageReceived or OpenNettyNotifications.MessageSent, + OpenNettyMessage { Protocol : OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : OpenNettyAddress address, + Mode : OpenNettyMode mode }) + when command == OpenNettyCommands.TemperatureControl.CancelWirePilotDerogationMode: + { + // Ignore the message if the corresponding endpoint couldn't be resolved or if it + // was received by a different gateway than the one associated with the endpoint. 
+ var endpoint = await _manager.FindEndpointByAddressAsync(address); + if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway)) + { + return; + } + + List tasks = []; + + if (endpoint.HasCapability(OpenNettyCapabilities.PilotWireHeating)) + { + tasks.Add(_events.PublishAsync(new PilotWireDerogationModeReportedEventArgs(endpoint, null, null)).AsTask()); + } + + if (endpoint is { Unit.Definition.AssociatedUnitId: ushort unit }) + { + tasks.Add(Task.Run(async () => + { + var endpoint = await _manager.FindEndpointByAddressAsync(OpenNettyAddress.FromNitooAddress( + OpenNettyAddress.ToNitooAddress(address).Identifier, unit)); + + if (endpoint is not null && endpoint.HasCapability(OpenNettyCapabilities.PilotWireHeating)) + { + await _events.PublishAsync(new PilotWireDerogationModeReportedEventArgs(endpoint, null, null)); + } + })); + } + + if (mode is OpenNettyMode.Broadcast or OpenNettyMode.Multicast && endpoint is { Unit.Scenarios: [_, ..] scenarios }) + { + var endpoints = scenarios.ToAsyncEnumerable() + .Where(static scenario => scenario.FunctionCode is 255) + .SelectAwait(scenario => _manager.FindEndpointByNameAsync(scenario.EndpointName)) + .Where(static endpoint => endpoint is { Protocol: OpenNettyProtocol.Nitoo, Unit: OpenNettyUnit }) + .OfType() + .Where(static endpoint => endpoint.HasCapability(OpenNettyCapabilities.PilotWireHeating)); + + tasks.Add(Parallel.ForEachAsync(endpoints, async (endpoint, cancellationToken) => + { + await _events.PublishAsync(new PilotWireDerogationModeReportedEventArgs(endpoint, null, null), cancellationToken); + })); + } + + await Task.WhenAll(tasks); + break; + } + + // Note: the battery level is received when pushing the NETWORK or LEARN buttons on a Zigbee device. + // To receive the battery level during normal operation, the LEARN button on the wireless device + // must be pressed and a CEN+ binding request must be sent by the gateway to bind the two devices. + case (OpenNettyNotifications.MessageReceived, + OpenNettyMessage { Protocol : OpenNettyProtocol.Zigbee, + Type : OpenNettyMessageType.DimensionRead, + Address : OpenNettyAddress address, + Dimension: OpenNettyDimension dimension, + Values : [{ Length: > 0 } value] }) + when dimension == OpenNettyDimensions.Management.BatteryInformation: + { + // Ignore the message if the corresponding endpoint couldn't be resolved or if it + // was received by a different gateway than the one associated with the endpoint. + var endpoint = await _manager.FindEndpointByAddressAsync(address); + if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway)) + { + return; + } + + List tasks = []; + + if (endpoint.HasCapability(OpenNettyCapabilities.Battery)) + { + tasks.Add(ReportBatteryLevelAsync(endpoint, CancellationToken.None).AsTask()); + } + + tasks.Add(Task.Run(async () => + { + // Note: while the battery level is reported using a unit-specific address, it applies to the entire + // device: this task retrieves the device endpoint and, if available, report its battery level. 
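+ //
+ // The raw BATTERY INFORMATION value is a coarse four-level indicator that is mapped to an
+ // approximate percentage (0%, 33%, 66% or 100%) by the local ReportBatteryLevelAsync function below.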
+ var endpoint = await _manager.FindEndpointByAddressAsync(OpenNettyAddress.FromDecimalZigbeeAddress( + OpenNettyAddress.ToZigbeeAddress(address).Identifier)); + + if (endpoint is not null && endpoint.HasCapability(OpenNettyCapabilities.Battery)) + { + await ReportBatteryLevelAsync(endpoint, CancellationToken.None); + } + })); + + async ValueTask ReportBatteryLevelAsync(OpenNettyEndpoint endpoint, CancellationToken cancellationToken) + => await _events.PublishAsync(new BatteryLevelReportedEventArgs(endpoint, value switch + { + "0" => 0, + "1" => 33, + "2" => 66, + "3" => 100, + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }), cancellationToken); + break; + } + + // Note: Nitoo radio devices don't directly expose the battery level but send a special frame + // header converted to an OpenWebNet "BATTERY WEAK" BUS COMMAND when the battery is low. + case (OpenNettyNotifications.MessageReceived, + OpenNettyMessage { Protocol: OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : OpenNettyAddress address }) + when command == OpenNettyCommands.Management.BatteryWeak: + { + // Ignore the message if the corresponding endpoint couldn't be resolved or if it + // was received by a different gateway than the one associated with the endpoint. + var endpoint = await _manager.FindEndpointByAddressAsync(address); + if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway)) + { + return; + } + + if (endpoint.HasCapability(OpenNettyCapabilities.Battery)) + { + await _events.PublishAsync(new BatteryLevelReportedEventArgs(endpoint, (ushort) 5)); + } + break; + } + + case (OpenNettyNotifications.MessageReceived, + OpenNettyMessage { Protocol: OpenNettyProtocol.Zigbee, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : OpenNettyAddress address }) + when command == OpenNettyCommands.Scenario.OpenBinding || + command == OpenNettyCommands.Scenario.CloseBinding: + { + // Ignore the message if the corresponding endpoint couldn't be resolved or if it + // was received by a different gateway than the one associated with the endpoint. + var endpoint = await _manager.FindEndpointByAddressAsync(address); + if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway)) + { + return; + } + + if (endpoint.HasCapability(OpenNettyCapabilities.ZigbeeBinding)) + { + if (command == OpenNettyCommands.Scenario.OpenBinding) + { + await _events.PublishAsync(new BindingOpenEventArgs(endpoint)); + } + + else + { + await _events.PublishAsync(new BindingClosedEventArgs(endpoint)); + } + } + break; + } + + case (OpenNettyNotifications.MessageReceived, + OpenNettyMessage { Protocol : OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.DimensionRead, + Address : OpenNettyAddress address, + Dimension: OpenNettyDimension dimension, + Values : [{ Length: > 0 }, { Length: > 0 }, { Length: > 0 }, { Length: > 0 }] values }) + when dimension == OpenNettyDimensions.Diagnostics.DeviceDescription: + { + // Ignore the message if the corresponding endpoint couldn't be resolved or if it + // was received by a different gateway than the one associated with the endpoint. 
+ var endpoint = await _manager.FindEndpointByAddressAsync(address); + if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway)) + { + return; + } + + await _events.PublishAsync(new DeviceDescriptionReportedEventArgs(endpoint, + OpenNettyModels.Diagnostics.DeviceDescription.CreateFromDeviceDescription(values))); + break; + } + + case (OpenNettyNotifications.MessageReceived or OpenNettyNotifications.MessageSent, + OpenNettyMessage { Protocol : OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.DimensionSet, + Address : OpenNettyAddress address, + Mode : OpenNettyMode.Broadcast, + Dimension: OpenNettyDimension dimension, + Values : [{ Length: > 0 } value, ..] }) + when dimension == OpenNettyDimensions.Lighting.DimmerStep: + { + // Ignore the message if the corresponding endpoint couldn't be resolved or if it + // was received by a different gateway than the one associated with the endpoint. + var endpoint = await _manager.FindEndpointByAddressAsync(address); + if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway)) + { + return; + } + + if (endpoint.HasCapability(OpenNettyCapabilities.DimmingScenario)) + { + var step = int.Parse(value, CultureInfo.InvariantCulture); + if (step is >= 128) + { + step -= 256; + } + + await _events.PublishAsync(new DimmingStepReportedEventArgs(endpoint, step)); + } + break; + } + + case (OpenNettyNotification notification, + OpenNettyMessage { Protocol: OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : OpenNettyAddress address, + Mode : OpenNettyMode mode }) + // Note: timed and progressive scenarios are parameterized. + when command == OpenNettyCommands.Scenario.Action || + (command.Category == OpenNettyCategories.Scenarios && + command.Value == OpenNettyCommands.Scenario.ActionForTime.Value && + command.Parameters is [{ Length: > 0 }]) || + (command.Category == OpenNettyCategories.Scenarios && + command.Value == OpenNettyCommands.Scenario.ActionInTime.Value && + command.Parameters is [{ Length: > 0 }]): + { + // Ignore the message if the corresponding endpoint couldn't be resolved or if it + // was received by a different gateway than the one associated with the endpoint. + var endpoint = await _manager.FindEndpointByAddressAsync(address); + if (endpoint is null || (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway)) + { + return; + } + + List tasks = []; + + if (command == OpenNettyCommands.Scenario.Action) + { + if (!endpoint.HasCapability(OpenNettyCapabilities.BasicScenario)) + { + break; + } + + if (mode is OpenNettyMode.Broadcast && notification is OpenNettyNotifications.MessageReceived) + { + tasks.Add(_events.PublishAsync(new BasicScenarioReportedEventArgs(endpoint)).AsTask()); + } + + // Note: since they are radiofrequency devices, the current state of Nitoo wireless burglar alarms + // cannot be retrieved using a unit description request. To inform devices associated using a + // PnL scenario of a state change, Nitoo alarms broadcast it using unit-specific ACTION scenarios. 
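+ //
+ // The unit identifier of the ACTION scenario encodes the alarm state (4: armed, 5: disarmed,
+ // 6: partially armed, 7: triggered, 8: exit delay elapsed, 9: event detected), as mapped below.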
+ if (endpoint.HasCapability(OpenNettyCapabilities.WirelessBurglarAlarmScenario)) + { + tasks.Add(Task.Run(async () => + { + var (identifier, unit) = OpenNettyAddress.ToNitooAddress(address); + + var endpoint = await _manager.FindEndpointByAddressAsync(OpenNettyAddress.FromNitooAddress(identifier)); + if (endpoint is null || !endpoint.HasCapability(OpenNettyCapabilities.WirelessBurglarAlarmState)) + { + return; + } + + var state = unit switch + { + 4 => OpenNettyModels.Alarm.WirelessBurglarAlarmState.Armed, + 5 => OpenNettyModels.Alarm.WirelessBurglarAlarmState.Disarmed, + 6 => OpenNettyModels.Alarm.WirelessBurglarAlarmState.PartiallyArmed, + 7 => OpenNettyModels.Alarm.WirelessBurglarAlarmState.Triggered, + 8 => OpenNettyModels.Alarm.WirelessBurglarAlarmState.ExitDelayElapsed, + 9 => OpenNettyModels.Alarm.WirelessBurglarAlarmState.EventDetected, + _ => null as OpenNettyModels.Alarm.WirelessBurglarAlarmState? + }; + + if (state is not null) + { + await _events.PublishAsync(new WirelessBurglarAlarmStateReportedEventArgs(endpoint, state.Value)); + } + })); + } + } + + else if (command.Value == OpenNettyCommands.Scenario.ActionForTime.Value) + { + if (!endpoint.HasCapability(OpenNettyCapabilities.TimedScenario)) + { + break; + } + + if (notification is OpenNettyNotifications.MessageReceived && mode is OpenNettyMode.Broadcast) + { + var duration = Math.Round(double.Parse(command.Parameters[0], CultureInfo.InvariantCulture) / 5, MidpointRounding.AwayFromZero); + + await _events.PublishAsync(new TimedScenarioReportedEventArgs(endpoint, TimeSpan.FromSeconds(duration))); + } + } + + else if (command.Value == OpenNettyCommands.Scenario.ActionInTime.Value) + { + if (!endpoint.HasCapability(OpenNettyCapabilities.ProgressiveScenario)) + { + break; + } + + if (notification is OpenNettyNotifications.MessageReceived && mode is OpenNettyMode.Broadcast) + { + var duration = Math.Round(double.Parse(command.Parameters[0], CultureInfo.InvariantCulture) / 5, MidpointRounding.AwayFromZero); + + await _events.PublishAsync(new ProgressiveScenarioReportedEventArgs(endpoint, TimeSpan.FromSeconds(duration))); + } + } + + // Note: on endpoints that don't support dimming, a "SCENARIO ACTION", "SCENARIO ACTION IN TIME" or + // "SCENARIO ACTION FOR TIME" BUS COMMAND always results in the associated unit being switched on. + // + // For endpoints that support dimming, the actual brightness level is retrieved asynchronously + // by a dedicated event handler to ensure the exact brightness level is correctly reported. + if (endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitchState) && + !endpoint.HasCapability(OpenNettyCapabilities.BasicDimmingState) && + !endpoint.HasCapability(OpenNettyCapabilities.AdvancedDimmingState)) + { + tasks.Add(ReportOnStateAsync(endpoint, CancellationToken.None).AsTask()); + } + + if (endpoint is { Protocol: OpenNettyProtocol.Nitoo, Unit.Definition.AssociatedUnitId: ushort unit }) + { + tasks.Add(Task.Run(async () => + { + var endpoint = await _manager.FindEndpointByAddressAsync(OpenNettyAddress.FromNitooAddress( + OpenNettyAddress.ToNitooAddress(address).Identifier, unit)); + + if (endpoint is null || !endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitchState)) + { + return; + } + + // Note: on endpoints that don't support dimming, a "SCENARIO ACTION", "SCENARIO ACTION IN TIME" or + // "SCENARIO ACTION FOR TIME" BUS COMMAND always results in the associated unit being switched on. 
+ //
+ // For endpoints that support dimming, the actual brightness level is retrieved asynchronously
+ // by a dedicated event handler to ensure the exact brightness level is correctly reported.
+ if (!endpoint.HasCapability(OpenNettyCapabilities.BasicDimmingState) &&
+ !endpoint.HasCapability(OpenNettyCapabilities.AdvancedDimmingState))
+ {
+ await ReportOnStateAsync(endpoint, CancellationToken.None);
+ }
+ }));
+ }
+
+ if (endpoint is { Unit.Scenarios: [_, ..] scenarios })
+ {
+ tasks.Add(Parallel.ForEachAsync(scenarios, async (scenario, cancellationToken) =>
+ {
+ var endpoint = await _manager.FindEndpointByNameAsync(scenario.EndpointName, cancellationToken);
+ if (endpoint is null)
+ {
+ return;
+ }
+
+ if (endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitchState))
+ {
+ if (scenario.FunctionCode is 101 or 103 or (>= 0 and <= 100))
+ {
+ await ReportOnStateAsync(endpoint, cancellationToken);
+ }
+
+ else if (scenario.FunctionCode is 102 or 104)
+ {
+ await _events.PublishAsync(new SwitchStateReportedEventArgs(endpoint,
+ OpenNettyModels.Lighting.SwitchState.Off), cancellationToken);
+ }
+ }
+
+ if (endpoint.HasCapability(OpenNettyCapabilities.BasicDimmingState) ||
+ endpoint.HasCapability(OpenNettyCapabilities.AdvancedDimmingState))
+ {
+ // Note: for Nitoo devices supporting dimming, an ON scenario always changes the brightness to 100%.
+ if (scenario.FunctionCode is 101 or 103)
+ {
+ await _events.PublishAsync(new BrightnessReportedEventArgs(endpoint, 100), cancellationToken);
+ }
+
+ // Note: the special brightness level "0" always indicates that the output is switched off.
+ // While Nitoo devices normally don't restore the last known brightness level, the brightness
+ // level is only reported if it's higher than zero for consistency with MyHome/SCS devices.
+ else if (scenario.FunctionCode is >= 1 and <= 100)
+ {
+ await _events.PublishAsync(new BrightnessReportedEventArgs(endpoint, scenario.FunctionCode), cancellationToken);
+ }
+ }
+ }));
+ }
+
+ await Task.WhenAll(tasks);
+
+ async ValueTask ReportOnStateAsync(OpenNettyEndpoint endpoint, CancellationToken cancellationToken)
+ {
+ await _events.PublishAsync(new SwitchStateReportedEventArgs(endpoint,
+ OpenNettyModels.Lighting.SwitchState.On), cancellationToken);
+
+ // Note: if the endpoint was configured to use the push-button mode,
+ // dispatch an "OFF state" event immediately after switching it on.
+ if (string.Equals(endpoint.GetStringSetting(OpenNettySettings.SwitchMode), "Push button", StringComparison.OrdinalIgnoreCase))
+ {
+ await _events.PublishAsync(new SwitchStateReportedEventArgs(endpoint,
+ OpenNettyModels.Lighting.SwitchState.Off), cancellationToken);
+ }
+ }
+ break;
+ }
+ }
+ })
+ .Do(_logger.UnhandledEventHandlerException)
+ .Retry()
+ .SubscribeAsync(static message => ValueTask.CompletedTask),
+
+ // Note: this event handler is responsible for retrieving the exact switch state and brightness level of
+ // endpoints that are directly or indirectly affected by BUS COMMAND or DIMENSION SET frames that don't
+ // provide accurate information about their state.
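+ //
+ // To limit the number of DIMENSION REQUEST frames sent to the devices, the matching notifications are
+ // grouped by address and throttled so that a single request is typically sent once a burst of ON% or
+ // "DIM STEP" frames has settled, rather than one request per received frame.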
+ await _pipeline.SelectMany(static notification => notification switch + { + // Note: brightness level changes reported via ON% BUS COMMAND frames are deliberately ignored for + // SCS and Zigbee endpoints that support advanced dimming, as this method often gives very imprecise + // results that are inconsistent with the brightness level retrieved using a "DIMMER LEVEL SPEED" + // or "DIMMER STATUS" DIMENSION REQUEST frame (e.g when setting the brightness to 30%, a F418U2 SCS + // dimmer correctly reports the "130" value when using a DIMENSION REQUEST but returns "5" (50%) + // when using a STATUS REQUEST. To avoid that, this event handler monitors all the ON% BUS COMMAND + // frames and retrieves the exact brightness level using a "DIMMER LEVEL SPEED" DIMENSION REQUEST. + OpenNettyNotifications.MessageReceived { + Session.Type: OpenNettySessionType.Event, + Message: { + Protocol: OpenNettyProtocol.Scs or OpenNettyProtocol.Zigbee, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : not null } message } + when command == OpenNettyCommands.Lighting.On20 || + command == OpenNettyCommands.Lighting.On30 || + command == OpenNettyCommands.Lighting.On40 || + command == OpenNettyCommands.Lighting.On50 || + command == OpenNettyCommands.Lighting.On60 || + command == OpenNettyCommands.Lighting.On70 || + command == OpenNettyCommands.Lighting.On80 || + command == OpenNettyCommands.Lighting.On90 || + command == OpenNettyCommands.Lighting.On100 + => AsyncObservable.Return<(OpenNettyNotification Notification, OpenNettyMessage Message)>((notification, message)), + + // Note: some Nitoo devices (like the 067210, 067212 and 067214 dimmers) offer preset buttons that allow + // setting the brightness to a fixed value configured by the user directly on the device. When pressed, + // the dimmer moves to the specified level and emits a SCENARIO ACTION frame but doesn't specify the + // actual value, that must be retrieved separately using a DIMENSION REQUEST to determine the exact level. + OpenNettyNotifications.MessageReceived { + Session.Type: OpenNettySessionType.Generic, + Message: { + Protocol: OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : not null, + Mode : OpenNettyMode.Broadcast } message } + when command == OpenNettyCommands.Scenario.Action + => AsyncObservable.Return<(OpenNettyNotification Notification, OpenNettyMessage Message)>((notification, message)), + + OpenNettyNotifications.MessageReceived { + Session.Type: OpenNettySessionType.Generic, + Message: { + Protocol: OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : not null, + Mode : OpenNettyMode.Broadcast } message } + // Note: timed scenarios are reported using parameterized BUS COMMAND frames. + when command.Category == OpenNettyCategories.Scenarios && + command.Value == OpenNettyCommands.Scenario.ActionForTime.Value && + command.Parameters is [{ Length: > 0 }] + => AsyncObservable.Return<(OpenNettyNotification Notification, OpenNettyMessage Message)>((notification, message)), + + OpenNettyNotifications.MessageReceived { + Session.Type: OpenNettySessionType.Generic, + Message: { + Protocol: OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : not null, + Mode : OpenNettyMode.Broadcast } message } + // Note: progressive scenarios are reported using parameterized BUS COMMAND frames. 
+ when command.Category == OpenNettyCategories.Scenarios &&
+ command.Value == OpenNettyCommands.Scenario.ActionInTime.Value &&
+ command.Parameters is [{ Length: > 0 }]
+ => AsyncObservable.Return<(OpenNettyNotification Notification, OpenNettyMessage Message)>((notification, message)),
+
+ // Nitoo devices allow changing the brightness level of a local unit (and of associated
+ // devices) by using a long pressure. For that, Nitoo devices broadcast "DIM STEP"
+ // DIMENSION SET frames until the pressed button is released by the user. While the final
+ // brightness level can be estimated using the number of "DIM STEP" frames received, this
+ // method is sadly very imprecise. To avoid that, "DIM STEP" frames are monitored and the
+ // exact brightness is retrieved from the Nitoo device itself using a DIMENSION REQUEST.
+ OpenNettyNotifications.MessageReceived {
+ Session.Type: OpenNettySessionType.Generic,
+ Message: {
+ Protocol : OpenNettyProtocol.Nitoo,
+ Type : OpenNettyMessageType.DimensionSet,
+ Address : not null,
+ Mode : OpenNettyMode.Broadcast,
+ Dimension: OpenNettyDimension dimension } message }
+ when dimension == OpenNettyDimensions.Lighting.DimmerStep
+ => AsyncObservable.Return<(OpenNettyNotification Notification, OpenNettyMessage Message)>((notification, message)),
+
+ _ => AsyncObservable.Empty<(OpenNettyNotification Notification, OpenNettyMessage Message)>()
+ })
+ .GroupBy(static arguments => arguments.Message.Address!.Value)
+ .SelectMany(static group => group.Throttle(group.Key.Type switch
+ {
+ OpenNettyAddressType.NitooUnit => TimeSpan.FromSeconds(0.5),
+ not OpenNettyAddressType.NitooUnit => TimeSpan.FromSeconds(1)
+ }))
+ .Do(async arguments =>
+ {
+ await Parallel.ForEachAsync(_manager.FindEndpointsByAddressAsync(arguments.Message.Address!.Value), async (endpoint, cancellationToken) =>
+ {
+ // Ignore the message if it was received by a different gateway than the one associated with the endpoint.
+ if (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway)
+ {
+ return;
+ }
+
+ switch (arguments.Message)
+ {
+ case { Type: OpenNettyMessageType.BusCommand, Command: OpenNettyCommand command }
+ when command == OpenNettyCommands.Scenario.Action && !endpoint.HasCapability(OpenNettyCapabilities.BasicScenario):
+ return;
+
+ case { Type: OpenNettyMessageType.BusCommand, Command: OpenNettyCommand command }
+ when command == OpenNettyCommands.Lighting.On20 ||
+ command == OpenNettyCommands.Lighting.On30 ||
+ command == OpenNettyCommands.Lighting.On40 ||
+ command == OpenNettyCommands.Lighting.On50 ||
+ command == OpenNettyCommands.Lighting.On60 ||
+ command == OpenNettyCommands.Lighting.On70 ||
+ command == OpenNettyCommands.Lighting.On80 ||
+ command == OpenNettyCommands.Lighting.On90 ||
+ command == OpenNettyCommands.Lighting.On100:
+ // Note: retrieving the state of an endpoint that doesn't support advanced dimming
+ // isn't necessary, as the ON% frames are used by the main event handler to change
+ // the brightness level of endpoints that only support basic dimming.
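+ //
+ // For the remaining endpoints, the exact brightness level is queried below via
+ // _controller.GetBrightnessAsync() rather than inferred from the imprecise ON% value.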
+ if (!endpoint.HasCapability(OpenNettyCapabilities.BasicDimmingState) || + !endpoint.HasCapability(OpenNettyCapabilities.AdvancedDimmingState)) + { + return; + } + break; + + case { Type: OpenNettyMessageType.BusCommand, Command: OpenNettyCommand command } + when command.Category == OpenNettyCategories.Scenarios && + command.Value == OpenNettyCommands.Scenario.ActionForTime.Value && + !endpoint.HasCapability(OpenNettyCapabilities.TimedScenario): + return; + + case { Type: OpenNettyMessageType.BusCommand, Command: OpenNettyCommand command } + when command.Category == OpenNettyCategories.Scenarios && + command.Value == OpenNettyCommands.Scenario.ActionInTime.Value && + !endpoint.HasCapability(OpenNettyCapabilities.ProgressiveScenario): + return; + + case { Type: OpenNettyMessageType.DimensionSet, Dimension: OpenNettyDimension dimension } + when dimension == OpenNettyDimensions.Lighting.DimmerStep && + !endpoint.HasCapability(OpenNettyCapabilities.DimmingScenario): + return; + } + + List tasks = []; + + if (endpoint.HasCapability(OpenNettyCapabilities.BasicDimmingState) || + endpoint.HasCapability(OpenNettyCapabilities.AdvancedDimmingState)) + { + tasks.Add(_controller.GetBrightnessAsync(endpoint, cancellationToken).AsTask()); + } + + if (endpoint is { Protocol: OpenNettyProtocol.Nitoo, Unit.Definition.AssociatedUnitId: ushort unit }) + { + tasks.Add(Task.Run(async () => + { + var endpoint = await _manager.FindEndpointByAddressAsync(OpenNettyAddress.FromNitooAddress( + OpenNettyAddress.ToNitooAddress(arguments.Message.Address!.Value).Identifier, unit)); + + if (endpoint is null) + { + return; + } + + if (endpoint.HasCapability(OpenNettyCapabilities.BasicDimmingState) || + endpoint.HasCapability(OpenNettyCapabilities.AdvancedDimmingState)) + { + _ = await _controller.GetBrightnessAsync(endpoint); + } + + // Note: on endpoints that don't support dimming, retrieving their actual status isn't necessary as a + // "SCENARIO ACTION", "SCENARIO ACTION IN TIME" or "SCENARIO ACTION FOR TIME" BUS COMMAND always results + // in the associated unit being switched on, which is a case already handled by the main event handler. + }, cancellationToken)); + } + + if (arguments.Message.Protocol is OpenNettyProtocol.Nitoo && + arguments.Message.Type is OpenNettyMessageType.DimensionSet && + arguments.Message.Dimension == OpenNettyDimensions.Lighting.DimmerStep && + endpoint is { Protocol: OpenNettyProtocol.Nitoo, Unit.Scenarios: [_, ..] scenarios }) + { + var endpoints = scenarios.ToAsyncEnumerable() + .Where(static scenario => scenario.FunctionCode is < 105) + .SelectAwait(scenario => _manager.FindEndpointByNameAsync(scenario.EndpointName)) + .Where(static endpoint => endpoint is { Protocol: OpenNettyProtocol.Nitoo, Unit: OpenNettyUnit }) + .OfType() + // Note: "DIM STEP" BUS COMMANDS don't have any effect on endpoints that don't support dimming. 
+ .Where(static endpoint => endpoint.HasCapability(OpenNettyCapabilities.BasicDimmingState) || + endpoint.HasCapability(OpenNettyCapabilities.AdvancedDimmingState)); + + tasks.Add(Parallel.ForEachAsync(endpoints, async (endpoint, cancellationToken) => + await _controller.GetBrightnessAsync(endpoint, cancellationToken))); + } + + await Task.WhenAll(tasks); + }); + }) + .Do(_logger.UnhandledEventHandlerException) + .Retry() + .SubscribeAsync(static notification => ValueTask.CompletedTask), + + // Note: this event handler is responsible for reporting state changes of endpoints that received - directly + // or indirectly via a Nitoo PnL scenario - a timed scenario command at the end of the specified duration. + await _pipeline.SelectMany(static notification => notification switch + { + OpenNettyNotifications.MessageReceived { + Session.Type: OpenNettySessionType.Generic, + Message: { + Protocol: OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : not null, + Mode : OpenNettyMode.Broadcast } message } + // Note: timed scenarios are reported using parameterized BUS COMMAND frames. + when command.Category == OpenNettyCategories.Scenarios && + command.Value == OpenNettyCommands.Scenario.ActionForTime.Value && + command.Parameters is [{ Length: > 0 } value] && + Math.Round(double.Parse(value, CultureInfo.InvariantCulture) / 5, MidpointRounding.AwayFromZero) is double duration + => AsyncObservable.Timer(TimeSpan.FromSeconds(duration)).Select(_ => (Notification: notification, Message: message)), + + _ => AsyncObservable.Empty<(OpenNettyNotification Notification, OpenNettyMessage Message)>() + }) + .Do(async arguments => + { + // Ignore the message if no endpoint matching the associated address could be resolved. + var endpoint = await _manager.FindEndpointByAddressAsync(arguments.Message.Address!.Value); + if (endpoint is null) + { + return; + } + + // Ignore the message if it was received by a different gateway than the one associated with the endpoint. + if (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway) + { + return; + } + + if (!endpoint.HasCapability(OpenNettyCapabilities.TimedScenario)) + { + return; + } + + List tasks = []; + + if (endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitchState)) + { + tasks.Add(ReportOffStateAsync(endpoint, CancellationToken.None).AsTask()); + } + + if (endpoint is { Unit.Definition.AssociatedUnitId: ushort unit }) + { + tasks.Add(Task.Run(async () => + { + var endpoint = await _manager.FindEndpointByAddressAsync(OpenNettyAddress.FromNitooAddress( + OpenNettyAddress.ToNitooAddress(arguments.Message.Address!.Value).Identifier, unit)); + + if (endpoint is null) + { + return; + } + + if (endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitchState)) + { + await ReportOffStateAsync(endpoint, CancellationToken.None); + } + })); + } + + if (endpoint is { Unit.Scenarios: [_, ..] 
scenarios }) + { + var endpoints = scenarios.ToAsyncEnumerable() + .Where(static scenario => scenario.FunctionCode is < 105) + .SelectAwait(scenario => _manager.FindEndpointByNameAsync(scenario.EndpointName)) + .Where(static endpoint => endpoint is { Protocol: OpenNettyProtocol.Nitoo, Unit: OpenNettyUnit }) + .OfType() + .Where(static endpoint => endpoint.HasCapability(OpenNettyCapabilities.OnOffSwitchState)); + + tasks.Add(Parallel.ForEachAsync(endpoints, ReportOffStateAsync)); + } + + await Task.WhenAll(tasks); + + ValueTask ReportOffStateAsync(OpenNettyEndpoint endpoint, CancellationToken cancellationToken) + => _events.PublishAsync(new SwitchStateReportedEventArgs(endpoint, OpenNettyModels.Lighting.SwitchState.Off), cancellationToken); + }) + .Do(_logger.UnhandledEventHandlerException) + .Retry() + .SubscribeAsync(static notification => ValueTask.CompletedTask), + + // Note: this event handler is responsible for retrieving the pilot wire configuration of an endpoint + // immediately after the setpoint mode or derogation mode was changed via an outgoing BUS COMMAND message. + await _pipeline.SelectMany(static notification => notification switch + { + OpenNettyNotifications.MessageReceived { + Session.Type: OpenNettySessionType.Generic, + Message: { + Protocol: OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : not null, + Mode : OpenNettyMode.Broadcast } message } + // Note: pilot wire mode changes are reported using parameterized BUS COMMAND frames. + when command.Category == OpenNettyCategories.TemperatureControl && + command.Value == OpenNettyCommands.TemperatureControl.WirePilotSetpointMode.Value && + command.Parameters is [{ Length: > 0 }] + => AsyncObservable.Return<(OpenNettyNotification Notification, OpenNettyMessage Message)>((notification, message)), + + OpenNettyNotifications.MessageReceived { + Session.Type: OpenNettySessionType.Generic, + Message: { + Protocol: OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : not null, + Mode : OpenNettyMode.Broadcast } message } + when command == OpenNettyCommands.TemperatureControl.CancelWirePilotDerogationMode + => AsyncObservable.Return<(OpenNettyNotification Notification, OpenNettyMessage Message)>((notification, message)), + + OpenNettyNotifications.MessageSent { + Session.Type: OpenNettySessionType.Generic, + Message: { + Protocol: OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : not null } message } + // Note: pilot wire mode changes are emitted using parameterized BUS COMMAND frames. 
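+            // Both the incoming broadcasts above and the outgoing frames matched here are observed, so that
+            // setpoint or derogation changes sent by this library also trigger a pilot wire configuration read.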
+ when command.Category == OpenNettyCategories.TemperatureControl && + command.Value == OpenNettyCommands.TemperatureControl.WirePilotSetpointMode.Value && + command.Parameters is [{ Length: > 0 }] + => AsyncObservable.Return<(OpenNettyNotification Notification, OpenNettyMessage Message)>((notification, message)), + + OpenNettyNotifications.MessageSent { + Session.Type: OpenNettySessionType.Generic, + Message: { + Protocol: OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : not null } message } + when command == OpenNettyCommands.TemperatureControl.CancelWirePilotDerogationMode + => AsyncObservable.Return<(OpenNettyNotification Notification, OpenNettyMessage Message)>((notification, message)), + + _ => AsyncObservable.Empty<(OpenNettyNotification Notification, OpenNettyMessage Message)>() + }) + .GroupBy(static arguments => arguments.Message.Address) + .SelectMany(static group => group.Throttle(TimeSpan.FromSeconds(0.5))) + .Do(async arguments => + { + // Ignore the message if no endpoint matching the associated address could be resolved. + var endpoint = await _manager.FindEndpointByAddressAsync(arguments.Message.Address!.Value); + if (endpoint is null) + { + return; + } + + // Ignore the message if it was received by a different gateway than the one associated with the endpoint. + if (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway) + { + return; + } + + if (endpoint.HasCapability(OpenNettyCapabilities.PilotWireHeating)) + { + _ = await _controller.GetPilotWireConfigurationAsync(endpoint); + } + }) + .Do(_logger.UnhandledEventHandlerException) + .Retry() + .SubscribeAsync(static notification => ValueTask.CompletedTask), + + // Note: this event handler is responsible for reporting water heater setpoint mode changes + // that can be inferred when receiving an "off-peak rate" notification from the smart meter. + await _pipeline.SelectMany(static notification => notification switch + { + OpenNettyNotifications.MessageReceived { + Session.Type: OpenNettySessionType.Generic, + Message: { + Protocol : OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.DimensionRead, + Dimension: OpenNettyDimension dimension, + Address : not null, + Mode : OpenNettyMode.Broadcast, + Values : [{ Length: > 0 }] } message } + when dimension == OpenNettyDimensions.TemperatureControl.SmartMeterRateType + => AsyncObservable.Return<(OpenNettyNotification Notification, OpenNettyMessage Message)>((notification, message)), + + _ => AsyncObservable.Empty<(OpenNettyNotification Notification, OpenNettyMessage Message)>() + }) + .GroupBy(static arguments => arguments.Message.Address) + .SelectMany(static group => group.Throttle(TimeSpan.FromSeconds(2.5))) + .Do(async arguments => + { + // Ignore the message if no endpoint matching the associated address could be resolved. + var endpoint = await _manager.FindEndpointByAddressAsync(arguments.Message.Address!.Value); + if (endpoint is null) + { + return; + } + + // Ignore the message if it was received by a different gateway than the one associated with the endpoint. 
+ if (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway) + { + return; + } + + if (!endpoint.HasCapability(OpenNettyCapabilities.SmartMeterInformation)) + { + return; + } + + var endpoints = _manager.EnumerateEndpointsAsync() + .Where(static endpoint => endpoint.Protocol is OpenNettyProtocol.Nitoo) + .Where(static endpoint => endpoint.HasCapability(OpenNettyCapabilities.WaterHeating)) + .Where(endpoint => endpoint.Address is OpenNettyAddress address && + OpenNettyAddress.ToNitooAddress(address) is { Identifier: uint identifier } && + identifier == OpenNettyAddress.ToNitooAddress(arguments.Message.Address!.Value).Identifier); + + await Parallel.ForEachAsync(endpoints, async (endpoint, cancellationToken) => + { + switch (arguments.Message.Values, await _controller.GetUnitDescriptionAsync(endpoint, cancellationToken)) + { + // If the unit description indicates water is not heating when an "off-peak" signal + // is received, this means that production of hot water was turned off by the user. + case (["2"], { FunctionCode: 133, Values: ["0" or "32"] }): + await _events.PublishAsync(new WaterHeaterSetpointModeReportedEventArgs(endpoint, + OpenNettyModels.TemperatureControl.WaterHeaterMode.ForcedOff), cancellationToken); + break; + + case (["2"], { FunctionCode: 133, Values: ["1" or "33"] }): + await _events.PublishAsync(new WaterHeaterSetpointModeReportedEventArgs(endpoint, + OpenNettyModels.TemperatureControl.WaterHeaterMode.Automatic), cancellationToken); + break; + + case (["2"], { FunctionCode: 133, Values: ["17"] }): + await _events.PublishAsync(new WaterHeaterSetpointModeReportedEventArgs(endpoint, + OpenNettyModels.TemperatureControl.WaterHeaterMode.ForcedOn), cancellationToken); + break; + } + }); + }) + .Do(_logger.UnhandledEventHandlerException) + .Retry() + .SubscribeAsync(static notification => ValueTask.CompletedTask), + + // Note: this event handler is responsible for retrieving the water heating state immediately + // after the heating mode was modified by the user via an outgoing DIMENSION SET message. + await _pipeline.SelectMany(static notification => notification switch + { + OpenNettyNotifications.MessageSent { + Session.Type: OpenNettySessionType.Generic, + Message: { + Protocol : OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.DimensionSet, + Dimension: OpenNettyDimension dimension, + Address : not null, + Values : [{ Length: > 0 }] } message } + when dimension == OpenNettyDimensions.TemperatureControl.WaterHeatingMode + => AsyncObservable.Return<(OpenNettyNotification Notification, OpenNettyMessage Message)>((notification, message)), + + _ => AsyncObservable.Empty<(OpenNettyNotification Notification, OpenNettyMessage Message)>() + }) + .GroupBy(static arguments => arguments.Message.Address) + .SelectMany(static group => group.Throttle(TimeSpan.FromSeconds(0.5))) + .Do(async arguments => + { + // Ignore the message if no endpoint matching the associated address could be resolved. + var endpoint = await _manager.FindEndpointByAddressAsync(arguments.Message.Address!.Value); + if (endpoint is null) + { + return; + } + + // Ignore the message if it was received by a different gateway than the one associated with the endpoint. 
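+            // Note: endpoints that are not tied to a specific gateway accept frames received by any gateway.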
+ if (endpoint.Gateway is not null && arguments.Notification.Gateway != endpoint.Gateway) + { + return; + } + + if (endpoint.HasCapability(OpenNettyCapabilities.WaterHeating)) + { + _ = await _controller.GetWaterHeaterStateAsync(endpoint); + } + }) + .Do(_logger.UnhandledEventHandlerException) + .Retry() + .SubscribeAsync(static notification => ValueTask.CompletedTask) + ]); +} diff --git a/src/OpenNetty/OpenNettyDevice.cs b/src/OpenNetty/OpenNettyDevice.cs new file mode 100644 index 0000000..8ce57ea --- /dev/null +++ b/src/OpenNetty/OpenNettyDevice.cs @@ -0,0 +1,83 @@ +using System.Collections.Immutable; + +namespace OpenNetty; + +/// +/// Represents an OpenNetty device. +/// +public sealed class OpenNettyDevice : IEquatable +{ + /// + /// Gets or sets the device definition associated with the device. + /// + public required OpenNettyDeviceDefinition Definition { get; init; } + + /// + /// Gets or sets the serial number associated with the device, + /// if applicable (required for Nitoo and Zigbee devices). + /// + public string? SerialNumber { get; init; } + + /// + /// Gets or sets the user-defined settings associated with the device, if applicable. + /// + public ImmutableDictionary Settings { get; init; } = + ImmutableDictionary.Empty; + + /// + public bool Equals(OpenNettyDevice? other) + { + if (ReferenceEquals(this, other)) + { + return true; + } + + return other is not null && + Definition == other.Definition && + string.Equals(SerialNumber, other.SerialNumber, StringComparison.OrdinalIgnoreCase) && + Settings.Count == other.Settings.Count && !Settings.Except(other.Settings).Any(); + } + + /// + public override bool Equals(object? obj) => obj is OpenNettyDevice device && Equals(device); + + /// + public override int GetHashCode() + { + var hash = new HashCode(); + hash.Add(Definition); + hash.Add(SerialNumber); + + hash.Add(Settings.Count); + foreach (var (name, value) in Settings) + { + hash.Add(name); + hash.Add(value); + } + + return hash.ToHashCode(); + } + + /// + /// Computes the representation of the current device. + /// + /// The representation of the current device. + public override string ToString() => SerialNumber ?? string.Empty; + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettyDevice? left, OpenNettyDevice? right) + => ReferenceEquals(left, right) || (left is not null && right is not null && left.Equals(right)); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. + public static bool operator !=(OpenNettyDevice? left, OpenNettyDevice? right) => !(left == right); +} diff --git a/src/OpenNetty/OpenNettyDeviceDefinition.cs b/src/OpenNetty/OpenNettyDeviceDefinition.cs new file mode 100644 index 0000000..6eef59f --- /dev/null +++ b/src/OpenNetty/OpenNettyDeviceDefinition.cs @@ -0,0 +1,152 @@ +using System.Collections.Immutable; +using System.Reactive; + +namespace OpenNetty; + +/// +/// Represents an OpenNetty device definition. +/// +public sealed class OpenNettyDeviceDefinition : IEquatable +{ + /// + /// Gets or sets the capabilities associated with the device definition. + /// + public required ImmutableHashSet Capabilities { get; init; } = []; + + /// + /// Gets or sets the identities associated with the device definition. 
+ /// + public required ImmutableArray Identities { get; init; } + + /// + /// Gets or sets the media associated with the device definition. + /// + public required OpenNettyMedia Media { get; init; } + + /// + /// Gets or sets the protocol associated with the device definition. + /// + public required OpenNettyProtocol Protocol { get; init; } + + /// + /// Gets or sets the OpenNetty-defined settings associated with the device definition. + /// + public ImmutableDictionary Settings { get; init; } = + ImmutableDictionary.Empty; + + /// + /// Gets or sets the unit definitions associated with the device definition. + /// + public required ImmutableArray Units { get; init; } + + /// + public bool Equals(OpenNettyDeviceDefinition? other) + { + if (ReferenceEquals(this, other)) + { + return true; + } + + return other is not null && + Capabilities.Count == other.Capabilities.Count && Capabilities.Except(other.Capabilities).IsEmpty && + Identities.Length == other.Identities.Length && !Identities.Except(other.Identities).Any() && + Media == other.Media && + Protocol == other.Protocol && + Settings.Count == other.Settings.Count && !Settings.Except(other.Settings).Any() && + Units.Length == other.Units.Length && !Units.Except(other.Units).Any(); + } + + /// + public override bool Equals(object? obj) => obj is OpenNettyDeviceDefinition definition && Equals(definition); + + /// + public override int GetHashCode() + { + var hash = new HashCode(); + + hash.Add(Capabilities.Count); + foreach (var capability in Capabilities) + { + hash.Add(capability); + } + + hash.Add(Identities.Length); + foreach (var identity in Identities) + { + hash.Add(identity); + } + + hash.Add(Media); + hash.Add(Protocol); + + hash.Add(Settings.Count); + foreach (var (name, value) in Settings) + { + hash.Add(name); + hash.Add(value); + } + + hash.Add(Units.Length); + foreach (var unit in Units) + { + hash.Add(unit); + } + + return hash.ToHashCode(); + } + + /// + /// Determines whether the device has the specified capability. + /// + /// The capability name. + /// + /// if the device has the specified capability, otherwise. + /// + public bool HasCapability(OpenNettyCapability capability) => Capabilities.Contains(capability); + + /// + /// Determines whether the device has has an identity matching the specified brand and model. + /// + /// The device brand. + /// The device model. + /// + /// if the device has an identity matching the + /// specified brand and model, otherwise. + /// + public bool HasIdentity(OpenNettyBrand brand, string model) + { + if (!Enum.IsDefined(brand)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0006), nameof(brand)); + } + + ArgumentException.ThrowIfNullOrEmpty(model); + + foreach (var identity in Identities) + { + if (identity.Brand == brand && string.Equals(identity.Model, model, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + } + + return false; + } + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettyDeviceDefinition? left, OpenNettyDeviceDefinition? right) + => ReferenceEquals(left, right) || (left is not null && right is not null && left.Equals(right)); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. + public static bool operator !=(OpenNettyDeviceDefinition? left, OpenNettyDeviceDefinition? 
right) => !(left == right); +} diff --git a/src/OpenNetty/OpenNettyDevices.cs b/src/OpenNetty/OpenNettyDevices.cs new file mode 100644 index 0000000..3621b97 --- /dev/null +++ b/src/OpenNetty/OpenNettyDevices.cs @@ -0,0 +1,180 @@ +using System.Collections.Immutable; +using System.Reflection; +using System.Xml.Linq; + +namespace OpenNetty; + +/// +/// Exposes static methods allowing to resolve device or unit definitions +/// of Legrand and BTicino products supported by the OpenNetty library. +/// +public static class OpenNettyDevices +{ + /// + /// Resolves the device definition corresponding to the specified brand and model. + /// + /// The device brand. + /// The device model. + /// + /// The device definition corresponding to the specified brand and model or + /// if the device definition couldn't be found in the database. + /// + /// The model is null or empty or the brand is not valid. + public static OpenNettyDeviceDefinition? GetDeviceByModel(OpenNettyBrand brand, string model) + { + ArgumentException.ThrowIfNullOrEmpty(model); + + if (!Enum.IsDefined(brand)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0006), nameof(brand)); + } + + using var stream = Assembly.GetAssembly(typeof(OpenNettyDevices))?.GetManifestResourceStream( + "OpenNetty.OpenNettyDevices.xml") ?? throw new InvalidOperationException(SR.GetResourceString(SR.ID0073)); + + var document = XDocument.Load(stream); + if (document.Root is null) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0073)); + } + + foreach (var device in document.Root.Elements("Device")) + { + foreach (var identity in device.Elements("Identity")) + { + if ((string) identity.Attribute("Brand")! != Enum.GetName(brand) || + !string.Equals((string) identity.Attribute("Model")!, model, StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + return CreateDeviceDefinition(device); + } + } + + return null; + } + + /// + /// Resolves the unit definition corresponding to the specified brand, model and unit identifier. + /// + /// The device brand. + /// The device model. + /// The unit identifier. + /// + /// The unit definition corresponding to the specified brand, model and unit identifier or + /// if the unit definition couldn't be found in the database. + /// + /// The model is null or empty or the brand is not valid. + /// The unit identifier is out of range. + public static OpenNettyUnitDefinition? GetUnitByModel(OpenNettyBrand brand, string model, ushort id) + { + ArgumentException.ThrowIfNullOrEmpty(model); + ArgumentOutOfRangeException.ThrowIfLessThan(id, 1u); + ArgumentOutOfRangeException.ThrowIfGreaterThan(id, 15u); + + if (!Enum.IsDefined(brand)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0005), nameof(brand)); + } + + using var stream = Assembly.GetAssembly(typeof(OpenNettyDevices))?.GetManifestResourceStream( + "OpenNetty.OpenNettyDevices.xml") ?? throw new InvalidOperationException(SR.GetResourceString(SR.ID0073)); + + var document = XDocument.Load(stream); + if (document.Root is null) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0073)); + } + + foreach (var device in document.Root.Elements("Device")) + { + foreach (var identity in device.Elements("Identity")) + { + if ((string) identity.Attribute("Brand")! != Enum.GetName(brand) || + !string.Equals((string) identity.Attribute("Model")!, model, StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + foreach (var unit in device.Elements("Unit")) + { + if ((uint) unit.Attribute("Id")! 
!= id) + { + continue; + } + + return CreateUnitDefinition(unit); + } + } + } + + return null; + } + + private static OpenNettyDeviceDefinition CreateDeviceDefinition(XElement node) + { + HashSet capabilities = []; + List identities = []; + Dictionary settings = []; + List units = []; + + foreach (var capability in node.Elements("Capability")) + { + capabilities.Add(new OpenNettyCapability((string) capability.Attribute("Name")!)); + } + + foreach (var identity in node.Elements("Identity")) + { + identities.Add(new OpenNettyIdentity + { + Brand = Enum.Parse((string) identity.Attribute("Brand")!), + Model = (string) identity.Attribute("Model")! + }); + } + + foreach (var setting in node.Elements("Setting")) + { + settings.Add(new OpenNettySetting((string) setting.Attribute("Name")!), (string) setting.Attribute("Value")!); + } + + foreach (var unit in node.Elements("Unit")) + { + units.Add(CreateUnitDefinition(unit)); + } + + return new OpenNettyDeviceDefinition + { + Capabilities = [.. capabilities], + Identities = [.. identities], + Media = Enum.Parse((string) node.Attribute("Media")!), + Protocol = Enum.Parse((string) node.Attribute("Protocol")!), + Settings = settings.ToImmutableDictionary(), + Units = [.. units] + }; + } + + private static OpenNettyUnitDefinition CreateUnitDefinition(XElement node) + { + HashSet capabilities = []; + Dictionary settings = []; + + foreach (var capability in node.Elements("Capability")) + { + capabilities.Add(new OpenNettyCapability((string) capability.Attribute("Name")!)); + } + + foreach (var setting in node.Elements("Setting")) + { + settings.Add(new OpenNettySetting((string) setting.Attribute("Name")!), (string) setting.Attribute("Value")!); + } + + return new OpenNettyUnitDefinition + { + AssociatedUnitId = (ushort?) (uint?) node.Attribute("AssociatedUnitId"), + Capabilities = [.. 
capabilities], + Id = (ushort) (uint) node.Attribute("Id")!, + Settings = settings.ToImmutableDictionary() + }; + } +} diff --git a/src/OpenNetty/OpenNettyDevices.xml b/src/OpenNetty/OpenNettyDevices.xml new file mode 100644 index 0000000..c94eecb --- /dev/null +++ b/src/OpenNetty/OpenNettyDevices.xml @@ -0,0 +1,599 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/OpenNetty/OpenNettyDimension.cs b/src/OpenNetty/OpenNettyDimension.cs new file mode 100644 index 0000000..1ac0965 --- /dev/null +++ b/src/OpenNetty/OpenNettyDimension.cs @@ -0,0 +1,226 @@ +using System.Collections.Immutable; +using System.Diagnostics; +using System.Text; +using static OpenNetty.OpenNettyConstants; + +namespace OpenNetty; + +/// +/// Represents the dimension associated with an OpenNetty message. +/// +[DebuggerDisplay("{ToString(),nq}")] +public readonly struct OpenNettyDimension : IEquatable +{ + /// + /// Creates a new instance of the structure. + /// + /// The category. + /// The value. + public OpenNettyDimension(OpenNettyCategory category, string value) + : this(category, value, []) + { + } + + /// + /// Creates a new instance of the structure. + /// + /// The category. + /// The value. + /// The additional parameters, if applicable. + public OpenNettyDimension(OpenNettyCategory category, string value, ImmutableArray parameters) + { + ArgumentNullException.ThrowIfNull(value); + + // Ensure the value only includes ASCII digits. + foreach (var character in value) + { + if (!char.IsAsciiDigit(character)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0004), nameof(value)); + } + } + + // Ensure the parameters only include ASCII digits. + if (!Parameters.IsDefaultOrEmpty) + { + for (var index = 0; index < parameters.Length; index++) + { + foreach (var character in parameters[index]) + { + if (!char.IsAsciiDigit(character)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0004), nameof(value)); + } + } + } + } + + Category = category; + Value = value; + Parameters = parameters; + } + + /// + /// Gets the category associated with the dimension. + /// + public OpenNettyCategory Category { get; } + + /// + /// Gets the value associated with the dimension. + /// + public string Value { get; } + + /// + /// Gets the additional parameters associated with the dimension, if applicable. 
+ /// + public ImmutableArray Parameters { get; } + + /// + public bool Equals(OpenNettyDimension other) + { + if (Value is null) + { + return other.Value is null; + } + + if (Category != other.Category) + { + return false; + } + + if (!string.Equals(Value, other.Value, StringComparison.Ordinal)) + { + return false; + } + + if (!Parameters.IsDefaultOrEmpty && !other.Parameters.IsDefaultOrEmpty) + { + if (Parameters.Length != other.Parameters.Length) + { + return false; + } + + for (var index = 0; index < Parameters.Length; index++) + { + if (!string.Equals(Parameters[index], other.Parameters[index], StringComparison.Ordinal)) + { + return false; + } + } + } + + else if (Parameters.IsDefaultOrEmpty && !other.Parameters.IsDefaultOrEmpty) + { + return false; + } + + else if (!Parameters.IsDefaultOrEmpty && other.Parameters.IsDefaultOrEmpty) + { + return false; + } + + return true; + } + + /// + public override bool Equals(object? obj) => obj is OpenNettyDimension dimension && Equals(dimension); + + /// + public override int GetHashCode() + { + if (Value is null) + { + return 0; + } + + var hash = new HashCode(); + hash.Add(Category); + hash.Add(Value); + + if (!Parameters.IsDefaultOrEmpty) + { + hash.Add(Parameters.Length); + + for (var index = 0; index < Parameters.Length; index++) + { + hash.Add(Parameters[index]); + } + } + + else + { + hash.Add(0); + } + + return hash.ToHashCode(); + } + + /// + /// Computes the representation of the current dimension. + /// + /// The representation of the current dimension. + public override string ToString() + { + if (Value is null) + { + return string.Empty; + } + + if (Parameters.IsDefaultOrEmpty) + { + return Value; + } + + var builder = new StringBuilder(); + builder.Append(Value); + + for (var index = 0; index < Parameters.Length; index++) + { + builder.Append((char) Separators.Hash[0]); + builder.Append(Parameters[index]); + } + + return builder.ToString(); + } + + /// + /// Converts the dimension to a list of . + /// + /// The list of representing this dimension. + public ImmutableArray ToParameters() + { + if (Value is null) + { + return []; + } + + var builder = ImmutableArray.CreateBuilder(); + builder.Add(new OpenNettyParameter(Value)); + + if (!Parameters.IsDefaultOrEmpty) + { + for (var index = 0; index < Parameters.Length; index++) + { + builder.Add(new OpenNettyParameter(Parameters[index])); + } + } + + return builder.ToImmutable(); + } + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettyDimension left, OpenNettyDimension right) => left.Equals(right); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. + public static bool operator !=(OpenNettyDimension left, OpenNettyDimension right) => !(left == right); +} diff --git a/src/OpenNetty/OpenNettyDimensions.cs b/src/OpenNetty/OpenNettyDimensions.cs new file mode 100644 index 0000000..dbaf8ea --- /dev/null +++ b/src/OpenNetty/OpenNettyDimensions.cs @@ -0,0 +1,141 @@ +namespace OpenNetty; + +/// +/// Exposes common OpenNetty dimensions, as defined by the Nitoo and MyHome specifications. +/// +public static class OpenNettyDimensions +{ + /// + /// Lighting dimensions (WHO = 1). + /// + public static class Lighting + { + /// + /// Dimmer level/speed (DIMENSION = 1). 
+ /// + public static readonly OpenNettyDimension DimmerLevelSpeed = new(OpenNettyCategories.Lighting, "1"); + + /// + /// Dimmer status (DIMENSION = 4). + /// + public static readonly OpenNettyDimension DimmerStatus = new(OpenNettyCategories.Lighting, "4"); + + /// + /// Dimmer step (DIMENSION = 10). + /// + public static readonly OpenNettyDimension DimmerStep = new(OpenNettyCategories.Lighting, "10"); + } + + /// + /// Temperature control dimensions (WHO = 4). + /// + public static class TemperatureControl + { + /// + /// Water heating mode (DIMENSION = 40). + /// + public static readonly OpenNettyDimension WaterHeatingMode = new(OpenNettyCategories.TemperatureControl, "40"); + + /// + /// Smart meter rate type (DIMENSION = 42). + /// + public static readonly OpenNettyDimension SmartMeterRateType = new(OpenNettyCategories.TemperatureControl, "42"); + + /// + /// Smart meter indexes (DIMENSION = 43). + /// + public static readonly OpenNettyDimension SmartMeterIndexes = new(OpenNettyCategories.TemperatureControl, "43"); + } + + /// + /// Management dimensions (WHO = 13). + /// + public static class Management + { + /// + /// Time (DIMENSION = 0). + /// + public static readonly OpenNettyDimension Time = new(OpenNettyCategories.Management, "0"); + + /// + /// Date (DIMENSION = 1). + /// + public static readonly OpenNettyDimension Date = new(OpenNettyCategories.Management, "1"); + + /// + /// IP address (DIMENSION = 10). + /// + public static readonly OpenNettyDimension IpAddress = new(OpenNettyCategories.Management, "10"); + + /// + /// Netmask (DIMENSION = 11). + /// + public static readonly OpenNettyDimension Netmask = new(OpenNettyCategories.Management, "11"); + + /// + /// Firmware version (DIMENSION = 16). + /// + public static readonly OpenNettyDimension FirmwareVersion = new(OpenNettyCategories.Management, "16"); + + /// + /// Hardware version (DIMENSION = 17). + /// + public static readonly OpenNettyDimension HardwareVersion = new(OpenNettyCategories.Management, "17"); + + /// + /// Uptime (DIMENSION = 19). + /// + public static readonly OpenNettyDimension Uptime = new(OpenNettyCategories.Management, "19"); + + /// + /// Date/time (DIMENSION = 22). + /// + public static readonly OpenNettyDimension DateTime = new(OpenNettyCategories.Management, "22"); + + /// + /// Device identifier (DIMENSION = 27). + /// + public static readonly OpenNettyDimension DeviceIdentifier = new(OpenNettyCategories.Management, "27"); + + /// + /// Battery information (DIMENSION = 72). + /// + public static readonly OpenNettyDimension BatteryInformation = new(OpenNettyCategories.Management, "72"); + } + + /// + /// Diagnostics dimensions (WHO = 1000). + /// + public static class Diagnostics + { + /// + /// Device description (DIMENSION = 51). + /// + public static readonly OpenNettyDimension DeviceDescription = new(OpenNettyCategories.Diagnostics, "51"); + + /// + /// Memory data (DIMENSION = 52). + /// + public static readonly OpenNettyDimension MemoryData = new(OpenNettyCategories.Diagnostics, "52"); + + /// + /// Extended memory data (DIMENSION = 53). + /// + public static readonly OpenNettyDimension ExtendedMemoryData = new(OpenNettyCategories.Diagnostics, "53"); + + /// + /// Memory write (DIMENSION = 54). + /// + public static readonly OpenNettyDimension MemoryWrite = new(OpenNettyCategories.Diagnostics, "54"); + + /// + /// Unit description (DIMENSION = 55). 
+ /// + public static readonly OpenNettyDimension UnitDescription = new(OpenNettyCategories.Diagnostics, "55"); + + /// + /// Memory depth (DIMENSION = 56). + /// + public static readonly OpenNettyDimension MemoryDepth = new(OpenNettyCategories.Diagnostics, "56"); + } +} diff --git a/src/OpenNetty/OpenNettyEndpoint.cs b/src/OpenNetty/OpenNettyEndpoint.cs new file mode 100644 index 0000000..c1d411e --- /dev/null +++ b/src/OpenNetty/OpenNettyEndpoint.cs @@ -0,0 +1,240 @@ +using System.Collections.Immutable; +using System.Diagnostics.CodeAnalysis; + +namespace OpenNetty; + +/// +/// Represents an OpenNetty endpoint. +/// +public sealed class OpenNettyEndpoint : IEquatable +{ + /// + /// Gets or sets the address associated with the endpoint, if applicable. + /// + public OpenNettyAddress? Address { get; init; } + + /// + /// Gets or sets the capabilities associated with the endpoint. + /// + public ImmutableHashSet Capabilities { get; init; } = []; + + /// + /// Gets or sets the device associated with the endpoint, if applicable. + /// + public OpenNettyDevice? Device { get; init; } + + /// + /// Gets or sets the gateway that will process messages pointing to this endpoint. + /// + /// + /// Note: incoming frames that point to this endpoint but are not + /// received by the specified gateway will be automatically ignored. + /// + public OpenNettyGateway? Gateway { get; init; } + + /// + /// Gets or sets the media associated with the endpoint. + /// + public OpenNettyMedia? Media { get; init; } + + /// + /// Gets or sets the optional name associated with the endpoint. + /// + public string? Name { get; init; } + + /// + /// Gets or sets the protocol associated with the endpoint. + /// + public required OpenNettyProtocol Protocol { get; init; } + + /// + /// Gets or sets the settings associated with the endpoint. + /// + public ImmutableDictionary Settings { get; init; } = + ImmutableDictionary.Empty; + + /// + /// Gets or sets the unit associated with the endpoint, if applicable. + /// + /// Note: units are only valid for Nitoo or Zigbee endpoints. + public OpenNettyUnit? Unit { get; init; } + + /// + /// Resolves the specified boolean setting from the settings attached + /// to the endpoint (if set) or from the device or unit device objects. + /// + /// The setting name. + /// The boolean setting if it could be found, otherwise. + public bool? GetBooleanSetting(OpenNettySetting setting) + => TryGetSetting(setting, out string? value) && bool.TryParse(value, out bool result) ? result : null; + + /// + /// Resolves the specified string setting from the settings attached + /// to the endpoint (if set) or from the device or unit device objects. + /// + /// The setting name. + /// The string setting if it could be found, otherwise. + public string? GetStringSetting(OpenNettySetting setting) => TryGetSetting(setting, out string? value) ? value : null; + + /// + /// Determines whether the endpoint or the attached unit/device have the specified capability. + /// + /// The capability name. + /// + /// if the endpoint or the attached unit/device + /// have the specified capability, otherwise. 
+ /// + public bool HasCapability(OpenNettyCapability capability) + { + if (Protocol is OpenNettyProtocol.Nitoo or OpenNettyProtocol.Zigbee && + Unit is OpenNettyUnit unit) + { + return unit.Definition.HasCapability(capability); + } + + if (Device is OpenNettyDevice device) + { + return device.Definition.HasCapability(capability); + } + + return Capabilities.Contains(capability); + } + + /// + /// Tries to resolve the specified setting from the settings attached + /// to the endpoint (if set) or from the device or unit device objects. + /// + /// The setting name. + /// The setting value, or if it was not found. + /// if the setting was found, otherwise. + public bool TryGetSetting(OpenNettySetting setting, [NotNullWhen(true)] out string? value) + { + if (Protocol is OpenNettyProtocol.Nitoo or OpenNettyProtocol.Zigbee && + Unit is OpenNettyUnit unit) + { + return unit.Settings.TryGetValue(setting, out value) || + unit.Definition.Settings.TryGetValue(setting, out value) || + Settings.TryGetValue(setting, out value); + } + + else if (Device is OpenNettyDevice device) + { + return device.Settings.TryGetValue(setting, out value) || + device.Definition.Settings.TryGetValue(setting, out value) || + Settings.TryGetValue(setting, out value); + } + + return Settings.TryGetValue(setting, out value); + } + + /// + public bool Equals(OpenNettyEndpoint? other) + { + if (ReferenceEquals(this, other)) + { + return true; + } + + if (other is null) + { + return false; + } + + if (Address != other.Address) + { + return false; + } + + if (Capabilities.Count != other.Capabilities.Count || !Capabilities.Except(other.Capabilities).IsEmpty) + { + return false; + } + + if (Device != other.Device) + { + return false; + } + + if (Media != other.Media) + { + return false; + } + + if (!string.Equals(Name, other.Name, StringComparison.OrdinalIgnoreCase)) + { + return false; + } + + if (Protocol != other.Protocol) + { + return false; + } + + if (Settings.Count != other.Settings.Count || Settings.Except(other.Settings).Any()) + { + return false; + } + + if (Unit != other.Unit) + { + return false; + } + + return true; + } + + /// + public override bool Equals(object? obj) => obj is OpenNettyEndpoint endpoint && Equals(endpoint); + + /// + public override int GetHashCode() + { + var hash = new HashCode(); + hash.Add(Address); + + hash.Add(Capabilities.Count); + foreach (var capability in Capabilities) + { + hash.Add(capability); + } + + hash.Add(Device); + hash.Add(Media); + hash.Add(Name); + hash.Add(Protocol); + + hash.Add(Settings.Count); + foreach (var (name, value) in Settings) + { + hash.Add(name); + hash.Add(value); + } + + hash.Add(Unit); + + return hash.ToHashCode(); + } + + /// + /// Computes the representation of the current endpoint. + /// + /// The representation of the current endpoint. + public override string ToString() => Name ?? string.Empty; + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettyEndpoint? left, OpenNettyEndpoint? right) + => ReferenceEquals(left, right) || (left is not null && right is not null && left.Equals(right)); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. + public static bool operator !=(OpenNettyEndpoint? left, OpenNettyEndpoint? 
right) => !(left == right); +} diff --git a/src/OpenNetty/OpenNettyErrorCode.cs b/src/OpenNetty/OpenNettyErrorCode.cs new file mode 100644 index 0000000..f0fb2b6 --- /dev/null +++ b/src/OpenNetty/OpenNettyErrorCode.cs @@ -0,0 +1,77 @@ +namespace OpenNetty; + +/// +/// Represents an OpenNetty error code. +/// +public enum OpenNettyErrorCode +{ + /// + /// The gateway that received the frame rejected it. + /// + InvalidFrame = 0, + + /// + /// The gateway that received the frame was too busy to process the frame. + /// + GatewayBusy = 1, + + /// + /// No worker was active to process the frame. + /// + NoWorkerAvailable = 2, + + /// + /// No acknowledgement frame was received for this trame. + /// + NoAcknowledgementReceived = 3, + + /// + /// The protocol implemented by the gateway doesn't support the requested action. + /// + IncompatibleAction = 4, + + /// + /// An invalid dimension value was specified. + /// + InvalidDimensionValue = 5, + + /// + /// An invalid action was rejected by the remote device. + /// + InvalidAction = 6, + + /// + /// No valid/invalid action frame was received for this trame. + /// + NoActionReceived = 7, + + /// + /// No status reply was received for this status request. + /// + NoStatusReceived = 8, + + /// + /// No dimension frame was received for this dimension request. + /// + NoDimensionReceived = 9, + + /// + /// Authentication was required by the gateway but no password was provided. + /// + AuthenticationRequired = 10, + + /// + /// The authentication method returned by the gateway is not supported. + /// + AuthenticationMethodUnsupported = 11, + + /// + /// The authentication data was rejected by the gateway. + /// + AuthenticationInvalid = 12, + + /// + /// The connection negotiation couldn't be completed in the allowed time frame. + /// + NegotiationTimeout = 13 +} diff --git a/src/OpenNetty/OpenNettyEvents.cs b/src/OpenNetty/OpenNettyEvents.cs new file mode 100644 index 0000000..f98cf10 --- /dev/null +++ b/src/OpenNetty/OpenNettyEvents.cs @@ -0,0 +1,366 @@ +using System.ComponentModel; +using System.Reactive.Concurrency; +using System.Reactive.Linq; +using System.Reactive.Subjects; +using System.Threading.Channels; +using Microsoft.Extensions.Hosting; + +namespace OpenNetty; + +/// +/// Exposes high-level events that are automatically inferred from +/// incoming or outgoing OpenWebNet frames by the OpenNetty coordinator. +/// +public sealed class OpenNettyEvents : IDisposable +{ + private readonly Channel _channel = Channel.CreateUnbounded(); + private readonly IConnectableAsyncObservable _observable; + private readonly CancellationTokenRegistration _registration; + + /// + /// Creates a new instance of the class. + /// + /// The host application lifetime. + public OpenNettyEvents(IHostApplicationLifetime lifetime) + { + _observable = AsyncObservable.Create(observer => + { + return TaskPoolAsyncScheduler.Default.ScheduleAsync(async cancellationToken => + { + while (!cancellationToken.IsCancellationRequested) + { + try + { + if (!await _channel.Reader.WaitToReadAsync(cancellationToken)) + { + await observer.OnCompletedAsync(); + return; + } + + while (_channel.Reader.TryRead(out EventArgs? 
arguments))
+                        {
+                            await observer.OnNextAsync(arguments);
+                        }
+                    }
+
+                    catch (ChannelClosedException)
+                    {
+                        await observer.OnCompletedAsync();
+                        return;
+                    }
+
+                    catch (Exception exception)
+                    {
+                        await observer.OnErrorAsync(exception);
+                    }
+                }
+            });
+        })
+        .Retry()
+        .Multicast(new ConcurrentSimpleAsyncSubject<EventArgs>());
+
+        // Marks the channel as completed when the host indicates the application is shutting down.
+        _registration = lifetime.ApplicationStopping.Register(static state =>
+            ((OpenNettyEvents) state!)._channel.Writer.TryComplete(), this);
+    }
+
+    /// <summary>
+    /// Gets an event triggered when a basic scenario is reported.
+    /// </summary>
+    public IAsyncObservable<BasicScenarioReportedEventArgs> BasicScenarioReported
+        => _observable.OfType<BasicScenarioReportedEventArgs>();
+
+    /// <summary>
+    /// Gets an event triggered when a battery level is reported.
+    /// </summary>
+    public IAsyncObservable<BatteryLevelReportedEventArgs> BatteryLevelReported
+        => _observable.OfType<BatteryLevelReportedEventArgs>();
+
+    /// <summary>
+    /// Gets an event triggered when a binding is closed.
+    /// </summary>
+    public IAsyncObservable<BindingClosedEventArgs> BindingClosed
+        => _observable.OfType<BindingClosedEventArgs>();
+
+    /// <summary>
+    /// Gets an event triggered when a binding is open.
+    /// </summary>
+    public IAsyncObservable<BindingOpenEventArgs> BindingOpen
+        => _observable.OfType<BindingOpenEventArgs>();
+
+    /// <summary>
+    /// Gets an event triggered when a brightness level is reported.
+    /// </summary>
+    public IAsyncObservable<BrightnessReportedEventArgs> BrightnessReported
+        => _observable.OfType<BrightnessReportedEventArgs>();
+
+    /// <summary>
+    /// Gets an event triggered when a device description is reported.
+    /// </summary>
+    public IAsyncObservable<DeviceDescriptionReportedEventArgs> DeviceDescriptionReported
+        => _observable.OfType<DeviceDescriptionReportedEventArgs>();
+
+    /// <summary>
+    /// Gets an event triggered when a dimming step is reported.
+    /// </summary>
+    public IAsyncObservable<DimmingStepReportedEventArgs> DimmingStepReported
+        => _observable.OfType<DimmingStepReportedEventArgs>();
+
+    /// <summary>
+    /// Gets an event triggered when an ON/OFF scenario is reported.
+    /// </summary>
+    public IAsyncObservable<OnOffScenarioReportedEventArgs> OnOffScenarioReported
+        => _observable.OfType<OnOffScenarioReportedEventArgs>();
+
+    /// <summary>
+    /// Gets an event triggered when a pilot wire derogation mode is reported.
+    /// </summary>
+    public IAsyncObservable<PilotWireDerogationModeReportedEventArgs> PilotWireDerogationModeReported
+        => _observable.OfType<PilotWireDerogationModeReportedEventArgs>();
+
+    /// <summary>
+    /// Gets an event triggered when a pilot wire setpoint mode is reported.
+    /// </summary>
+    public IAsyncObservable<PilotWireSetpointModeReportedEventArgs> PilotWireSetpointModeReported
+        => _observable.OfType<PilotWireSetpointModeReportedEventArgs>();
+
+    /// <summary>
+    /// Gets an event triggered when a progressive scenario is reported.
+    /// </summary>
+    public IAsyncObservable<ProgressiveScenarioReportedEventArgs> ProgressiveScenarioReported
+        => _observable.OfType<ProgressiveScenarioReportedEventArgs>();
+
+    /// <summary>
+    /// Gets an event triggered when smart meter indexes are reported.
+    /// </summary>
+    public IAsyncObservable<SmartMeterIndexesReportedEventArgs> SmartMeterIndexesReported
+        => _observable.OfType<SmartMeterIndexesReportedEventArgs>();
+
+    /// <summary>
+    /// Gets an event triggered when a smart meter power cut mode is reported.
+    /// </summary>
+    public IAsyncObservable<SmartMeterPowerCutModeReportedEventArgs> SmartMeterPowerCutModeReported
+        => _observable.OfType<SmartMeterPowerCutModeReportedEventArgs>();
+
+    /// <summary>
+    /// Gets an event triggered when a smart meter rate type is reported.
+    /// </summary>
+    public IAsyncObservable<SmartMeterRateTypeReportedEventArgs> SmartMeterRateTypeReported
+        => _observable.OfType<SmartMeterRateTypeReportedEventArgs>();
+
+    /// <summary>
+    /// Gets an event triggered when a switch state is reported.
+    /// </summary>
+    public IAsyncObservable<SwitchStateReportedEventArgs> SwitchStateReported
+        => _observable.OfType<SwitchStateReportedEventArgs>();
+
+    /// <summary>
+    /// Gets an event triggered when a timed scenario is reported.
+    /// </summary>
+    public IAsyncObservable<TimedScenarioReportedEventArgs> TimedScenarioReported
+        => _observable.OfType<TimedScenarioReportedEventArgs>();
+
+    /// <summary>
+    /// Gets an event triggered when a toggle scenario is reported.
+    /// </summary>
+    public IAsyncObservable<ToggleScenarioReportedEventArgs> ToggleScenarioReported
+        => _observable.OfType<ToggleScenarioReportedEventArgs>();
+
+    /// <summary>
+    /// Gets an event triggered when a water heater setpoint mode is reported.
+    /// </summary>
+    public IAsyncObservable<WaterHeaterSetpointModeReportedEventArgs> WaterHeaterSetpointModeReported
+        => _observable.OfType<WaterHeaterSetpointModeReportedEventArgs>();
+
+    ///
+    /// Gets an event triggered when a water heater state is reported.
+ /// + public IAsyncObservable WaterHeaterStateReported + => _observable.OfType(); + + /// + /// Gets an event triggered when a wireless burglar alarm state is reported. + /// + public IAsyncObservable WirelessBurglarAlarmStateReported + => _observable.OfType(); + + /// + /// Connects the so that events can start being processed. + /// + /// + /// A that can be used to monitor the asynchronous + /// operation and whose result is used as a signal by the OpenNetty hosted service + /// to inform the pipeline that no additional event will be processed. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public ValueTask ConnectAsync() => _observable.ConnectAsync(); + + /// + public void Dispose() => _registration.Dispose(); + + /// + /// Publishes a new event. + /// + /// The arguments associated with the event. + /// The that can be used to abort the operation. + /// A that can be used to monitor the asynchronous operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public ValueTask PublishAsync(TEventArgs arguments, CancellationToken cancellationToken = default) + where TEventArgs : notnull, EventArgs + => _channel.Writer.WriteAsync(arguments, cancellationToken); + + /// + /// Represents abstract event arguments used by OpenNetty. + /// + /// The endpoint. + public abstract record EventArgs(OpenNettyEndpoint Endpoint); + + /// + /// Represents event arguments used when a basic scenario is reported. + /// + /// The endpoint. + public sealed record BasicScenarioReportedEventArgs(OpenNettyEndpoint Endpoint) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when a battery level is reported. + /// + /// The endpoint. + /// The battery level. + public sealed record BatteryLevelReportedEventArgs(OpenNettyEndpoint Endpoint, ushort Level) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when a binding is closed. + /// + /// The endpoint. + public sealed record BindingClosedEventArgs(OpenNettyEndpoint Endpoint) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when a binding is open. + /// + /// The endpoint. + public sealed record BindingOpenEventArgs(OpenNettyEndpoint Endpoint) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when a brightness level is reported. + /// + /// The endpoint. + /// The brightness level, from 0 to 100. + public sealed record BrightnessReportedEventArgs(OpenNettyEndpoint Endpoint, ushort Level) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when a device description is reported. + /// + /// The endpoint. + /// The device description. + public sealed record DeviceDescriptionReportedEventArgs(OpenNettyEndpoint Endpoint, + OpenNettyModels.Diagnostics.DeviceDescription Description) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when a dimming step is reported. + /// + /// The endpoint. + /// The delta (positive or negative). + public sealed record DimmingStepReportedEventArgs(OpenNettyEndpoint Endpoint, int Delta) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when an ON/OFF scenario is reported. + /// + /// The endpoint. + /// The ON/OFF state. + public sealed record OnOffScenarioReportedEventArgs(OpenNettyEndpoint Endpoint, + OpenNettyModels.Lighting.SwitchState State) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when a pilot wire derogation mode is reported. + /// + /// The endpoint. + /// The derogation mode. + /// The derogation duration. 
+ public sealed record PilotWireDerogationModeReportedEventArgs( + OpenNettyEndpoint Endpoint, + OpenNettyModels.TemperatureControl.PilotWireMode? Mode, + OpenNettyModels.TemperatureControl.PilotWireDerogationDuration? Duration) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when a pilot wire setpoint mode is reported. + /// + /// The endpoint. + /// The setpoint mode. + public sealed record PilotWireSetpointModeReportedEventArgs( + OpenNettyEndpoint Endpoint, OpenNettyModels.TemperatureControl.PilotWireMode Mode) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when a progressive scenario is reported. + /// + /// The endpoint. + /// The scenario duration. + public sealed record ProgressiveScenarioReportedEventArgs(OpenNettyEndpoint Endpoint, TimeSpan Duration) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when smart meter indexes are reported. + /// + /// The endpoint. + /// The indexes. + public sealed record SmartMeterIndexesReportedEventArgs(OpenNettyEndpoint Endpoint, + OpenNettyModels.TemperatureControl.SmartMeterIndexes Indexes) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when a smart meter power cut mode is reported. + /// + /// The endpoint. + /// A boolean indicating whether the power cut mode is active or not. + public sealed record SmartMeterPowerCutModeReportedEventArgs(OpenNettyEndpoint Endpoint, bool Active) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when a smart meter rate type is reported. + /// + /// The endpoint. + /// The rate type. + public sealed record SmartMeterRateTypeReportedEventArgs(OpenNettyEndpoint Endpoint, + OpenNettyModels.TemperatureControl.SmartMeterRateType Type) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when a switch state is reported. + /// + /// The endpoint. + /// The switch state. + public sealed record SwitchStateReportedEventArgs(OpenNettyEndpoint Endpoint, + OpenNettyModels.Lighting.SwitchState State) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when a timed scenario is reported. + /// + /// The endpoint. + /// The duration after which associated devices change their state. + public sealed record TimedScenarioReportedEventArgs(OpenNettyEndpoint Endpoint, TimeSpan Duration) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when a toggle scenario is reported. + /// + /// The endpoint. + public sealed record ToggleScenarioReportedEventArgs(OpenNettyEndpoint Endpoint) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when a water heater setpoint mode is reported. + /// + /// The endpoint. + /// The setpoint mode. + public sealed record WaterHeaterSetpointModeReportedEventArgs(OpenNettyEndpoint Endpoint, + OpenNettyModels.TemperatureControl.WaterHeaterMode Mode) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when a water heater state is reported. + /// + /// The endpoint. + /// The state. + public sealed record WaterHeaterStateReportedEventArgs(OpenNettyEndpoint Endpoint, + OpenNettyModels.TemperatureControl.WaterHeaterState State) : EventArgs(Endpoint); + + /// + /// Represents event arguments used when a wireless burglar alarm state is reported. + /// + /// The endpoint. + /// The state. 
+ public sealed record WirelessBurglarAlarmStateReportedEventArgs(OpenNettyEndpoint Endpoint, + OpenNettyModels.Alarm.WirelessBurglarAlarmState State) : EventArgs(Endpoint); +} diff --git a/src/OpenNetty/OpenNettyException.cs b/src/OpenNetty/OpenNettyException.cs new file mode 100644 index 0000000..a02f2ec --- /dev/null +++ b/src/OpenNetty/OpenNettyException.cs @@ -0,0 +1,31 @@ +namespace OpenNetty; + +/// +/// Represents an OpenNetty exception. +/// +public sealed class OpenNettyException : Exception +{ + /// + /// Creates a new instance of the class. + /// + /// The error associated to the exception. + /// The message associated to the exception. + public OpenNettyException(OpenNettyErrorCode code, string? message) + : base(message) + => ErrorCode = code; + + /// + /// Creates a new instance of the class. + /// + /// The error associated to the exception. + /// The message associated to the exception. + /// The inner exception, if available. + public OpenNettyException(OpenNettyErrorCode code, string? message, Exception? innerException) + : base(message, innerException) + => ErrorCode = code; + + /// + /// Gets the error code associated to the exception. + /// + public OpenNettyErrorCode ErrorCode { get; } +} diff --git a/src/OpenNetty/OpenNettyExtensions.cs b/src/OpenNetty/OpenNettyExtensions.cs new file mode 100644 index 0000000..903f479 --- /dev/null +++ b/src/OpenNetty/OpenNettyExtensions.cs @@ -0,0 +1,58 @@ +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Options; + +namespace Microsoft.Extensions.DependencyInjection; + +/// +/// Exposes extensions allowing to register the OpenNetty services. +/// +public static class OpenNettyExtensions +{ + /// + /// Provides a common entry point for registering the OpenNetty services. + /// + /// The services collection. + /// This extension can be safely called multiple times. + /// The instance. + public static OpenNettyBuilder AddOpenNetty(this IServiceCollection services) + { + ArgumentNullException.ThrowIfNull(services); + + services.AddLogging(); + services.AddOptionsWithValidateOnStart(); + + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(typeof(OpenNettyLogger<>)); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + + services.TryAddEnumerable(ServiceDescriptor.Singleton()); + + services.TryAddEnumerable(ServiceDescriptor.Singleton< + IValidateOptions, OpenNettyConfiguration>()); + + services.AddHostedService(); + + return new OpenNettyBuilder(services); + } + + /// + /// Provides a common entry point for registering the OpenNetty services. + /// + /// The services collection. + /// The configuration delegate used to register new services. + /// This extension can be safely called multiple times. + /// The . + public static IServiceCollection AddOpenNetty(this IServiceCollection services, Action configuration) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + configuration(services.AddOpenNetty()); + + return services; + } +} diff --git a/src/OpenNetty/OpenNettyField.cs b/src/OpenNetty/OpenNettyField.cs new file mode 100644 index 0000000..1efb4bb --- /dev/null +++ b/src/OpenNetty/OpenNettyField.cs @@ -0,0 +1,197 @@ +using System.Buffers; +using System.Collections.Immutable; +using System.Diagnostics; +using System.Text; +using static OpenNetty.OpenNettyConstants; + +namespace OpenNetty; + +/// +/// Represents a raw OpenNetty field. 
+/// +[DebuggerDisplay("{ToString(),nq}")] +public readonly struct OpenNettyField : IEquatable +{ + /// + /// Creates a new instance of . + /// + /// The parameters included in the field. + public OpenNettyField(params IEnumerable parameters) + { + ArgumentNullException.ThrowIfNull(parameters); + + Parameters = [.. parameters]; + } + + /// + /// Creates a new instance of . + /// + /// The parameters included in the field. + public OpenNettyField(params ImmutableArray parameters) => Parameters = parameters; + + /// + /// Gets the raw parameters included in the field. + /// + public ImmutableArray Parameters { get; } + + /// + /// Represents an empty field. + /// + public static readonly OpenNettyField Empty = new([]); + + /// + /// Parses an OpenNetty field from the specified . + /// + /// The UTF-16 string containing the raw field. + /// The OpenNetty field corresponding to the specified . + public static OpenNettyField Parse(string value) => Parse(Encoding.ASCII.GetBytes(value)); + + /// + /// Parses an OpenNetty field from the specified . + /// + /// The ASCII/UTF-8 buffer containing the raw field. + /// The OpenNetty field corresponding to the specified . + public static OpenNettyField Parse(ReadOnlyMemory buffer) => Parse(new ReadOnlySequence(buffer)); + + /// + /// Parses an OpenNetty field from the specified . + /// + /// The ASCII/UTF-8 buffer containing the raw field. + /// The OpenNetty field corresponding to the specified . + public static OpenNettyField Parse(in ReadOnlySequence buffer) + { + // Note: fields can be omitted. In this case, they are represented as empty values. + + var reader = new SequenceReader(buffer); + List? parameters = null; + + do + { + // Try to read until the next '#' (that indicates the next parameter in the frame). + if (reader.TryReadTo(out ReadOnlySpan parameter, Separators.Hash, advancePastDelimiter: true)) + { + // Ensure the next character is not a second '#', which is not valid in a parameter. + if (reader.IsNext(Separators.Hash, advancePast: false)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0005), nameof(buffer)); + } + + parameters ??= new(capacity: 1); + parameters.Add(OpenNettyParameter.Parse(parameter)); + + // If '#' is not followed by any character, add an empty parameter. + if (reader.End) + { + parameters.Add(OpenNettyParameter.Empty); + } + } + + // Try to read until the next '*' (that indicates the next field in the frame). + else if (reader.TryReadTo(out parameter, Separators.Asterisk, advancePastDelimiter: true)) + { + parameters ??= new(capacity: 1); + parameters.Add(OpenNettyParameter.Parse(parameter)); + } + + // If no '#' or '*' can be found, this means there's no additional parameter: + // in this case, return the rest of the frame as a unique parameter. + else + { + parameters ??= new(capacity: 1); + parameters.Add(OpenNettyParameter.Parse(reader.UnreadSpan)); + reader.AdvanceToEnd(); + } + } + + while (!reader.End); + + return new OpenNettyField(parameters); + } + + /// + public bool Equals(OpenNettyField other) + { + if (Parameters.IsDefaultOrEmpty) + { + return other.Parameters.IsDefaultOrEmpty; + } + + if (Parameters.Length != other.Parameters.Length) + { + return false; + } + + for (var index = 0; index < Parameters.Length; index++) + { + if (Parameters[index] != other.Parameters[index]) + { + return false; + } + } + + return true; + } + + /// + public override bool Equals(object? 
obj) => obj is OpenNettyField field && Equals(field); + + /// + public override int GetHashCode() + { + if (Parameters.IsDefaultOrEmpty) + { + return 0; + } + + var hash = new HashCode(); + + for (var index = 0; index < Parameters.Length; index++) + { + hash.Add(Parameters[index]); + } + + return hash.ToHashCode(); + } + + /// + /// Computes the representation of the current field. + /// + /// The representation of the current field. + public override string ToString() + { + if (Parameters.IsDefaultOrEmpty) + { + return string.Empty; + } + + var builder = new StringBuilder(); + + for (var index = 0; index < Parameters.Length; index++) + { + if (index is not 0) + { + builder.Append((char) Separators.Hash[0]); + } + + builder.Append(Parameters[index]); + } + + return builder.ToString(); + } + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettyField left, OpenNettyField right) => left.Equals(right); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. + public static bool operator !=(OpenNettyField left, OpenNettyField right) => !(left == right); +} \ No newline at end of file diff --git a/src/OpenNetty/OpenNettyFrame.cs b/src/OpenNetty/OpenNettyFrame.cs new file mode 100644 index 0000000..da906ea --- /dev/null +++ b/src/OpenNetty/OpenNettyFrame.cs @@ -0,0 +1,194 @@ +using System.Buffers; +using System.Collections.Immutable; +using System.Diagnostics; +using System.Text; +using static OpenNetty.OpenNettyConstants; + +namespace OpenNetty; + +/// +/// Represents a raw OpenWebNet frame. +/// +[DebuggerDisplay("{ToString(),nq}")] +public readonly struct OpenNettyFrame : IEquatable +{ + /// + /// Creates a new instance of . + /// + /// The fields included in the frame. + public OpenNettyFrame(params IEnumerable fields) + { + ArgumentNullException.ThrowIfNull(fields); + + Fields = [.. fields]; + } + + /// + /// Creates a new instance of . + /// + /// The fields included in the frame. + public OpenNettyFrame(params ImmutableArray fields) => Fields = fields; + + /// + /// Gets the raw fields included in the current frame. + /// + public ImmutableArray Fields { get; } + + /// + /// Parses an OpenNetty frame from the specified . + /// + /// The raw frame. + /// The OpenNetty frame corresponding to the specified . + public static OpenNettyFrame Parse(string value) => Parse(Encoding.ASCII.GetBytes(value)); + + /// + /// Parses an OpenNetty frame from the specified . + /// + /// The ASCII/UTF-8 buffer containing the raw frame. + /// The OpenNetty frame corresponding to the specified . + public static OpenNettyFrame Parse(ReadOnlyMemory buffer) => Parse(new ReadOnlySequence(buffer)); + + /// + /// Parses an OpenNetty frame from the specified . + /// + /// The ASCII/UTF-8 buffer containing the raw frame. + /// The OpenNetty frame corresponding to the specified . + public static OpenNettyFrame Parse(in ReadOnlySequence buffer) + { + var reader = new SequenceReader(buffer); + + // Frames MUST always start with '*'. + if (!reader.IsNext(Delimiters.Start, advancePast: true)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0001), nameof(buffer)); + } + + List? fields = null; + + do + { + // Try to read until the next '*' (that indicates the next field in the frame). 
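// A small round-trip sketch for OpenNettyField.Parse. The value "25#4#01" is an illustrative
// '#'-separated field, not one taken from this patch.
var field = OpenNettyField.Parse("25#4#01");

// Three parameters were extracted ("25", "4" and "01") and ToString() reassembles them.
Console.WriteLine(field.Parameters.Length); // 3
Console.WriteLine(field.ToString());        // 25#4#01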
+ if (reader.TryReadTo(out ReadOnlySequence field, Separators.Asterisk, advancePastDelimiter: true)) + { + fields ??= new(capacity: 1); + fields.Add(OpenNettyField.Parse(field)); + } + + // If no '*' can be found, this means there's no additional field: in this case, + // keep reading until the end of the message, indicated by two '#' characters. + else if (reader.TryReadTo(out field, Delimiters.End, advancePastDelimiter: true)) + { + // At this point, we should have reached the end of the message. If this is not the case, + // throw an exception as two consecutive '#' must not appear before the end of the frame. + if (!reader.End) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0002), nameof(buffer)); + } + + fields ??= new(capacity: 1); + fields.Add(OpenNettyField.Parse(field)); + } + + // Frames MUST always end with '##'. + else + { + throw new ArgumentException(SR.GetResourceString(SR.ID0003), nameof(buffer)); + } + } + + while (!reader.End); + + return new OpenNettyFrame(fields); + } + + /// + public bool Equals(OpenNettyFrame other) + { + if (Fields.IsDefaultOrEmpty) + { + return other.Fields.IsDefaultOrEmpty; + } + + if (Fields.Length != other.Fields.Length) + { + return false; + } + + for (var index = 0; index < Fields.Length; index++) + { + if (Fields[index] != other.Fields[index]) + { + return false; + } + } + + return true; + } + + /// + public override bool Equals(object? obj) => obj is OpenNettyFrame frame && Equals(frame); + + /// + public override int GetHashCode() + { + if (Fields.IsDefaultOrEmpty) + { + return 0; + } + + var hash = new HashCode(); + + for (var index = 0; index < Fields.Length; index++) + { + hash.Add(Fields[index]); + } + + return hash.ToHashCode(); + } + + /// + /// Computes the representation of the current frame. + /// + /// The representation of the current frame. + public override string ToString() + { + if (Fields.IsDefaultOrEmpty) + { + return string.Empty; + } + + var builder = new StringBuilder(); + builder.Append((char) Delimiters.Start[0]); + + for (var index = 0; index < Fields.Length; index++) + { + if (index is not 0) + { + builder.Append((char) Separators.Asterisk[0]); + } + + builder.Append(Fields[index]); + } + + builder.Append((char) Delimiters.End[0]); + builder.Append((char) Delimiters.End[1]); + + return builder.ToString(); + } + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettyFrame left, OpenNettyFrame right) => left.Equals(right); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. + public static bool operator !=(OpenNettyFrame left, OpenNettyFrame right) => !(left == right); +} diff --git a/src/OpenNetty/OpenNettyFrames.cs b/src/OpenNetty/OpenNettyFrames.cs new file mode 100644 index 0000000..e0dbb77 --- /dev/null +++ b/src/OpenNetty/OpenNettyFrames.cs @@ -0,0 +1,22 @@ +namespace OpenNetty; + +/// +/// Exposes common OpenWebNet frames, as defined by the Nitoo and MyHome specifications. +/// +public static class OpenNettyFrames +{ + /// + /// ACK frame. + /// + public static readonly OpenNettyFrame Acknowledgement = OpenNettyFrame.Parse("*#*1##"); + + /// + /// BUSY NACK frame (Zigbee-specific). + /// + public static readonly OpenNettyFrame BusyNegativeAcknowledgement = OpenNettyFrame.Parse("*#*6##"); + + /// + /// NACK frame. 
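// A round-trip sketch for OpenNettyFrame.Parse using a standard OpenWebNet lighting command
// of the form *WHO*WHAT*WHERE##; the WHERE value "12" is illustrative.
var frame = OpenNettyFrame.Parse("*1*1*12##");
Console.WriteLine(frame.Fields.Length); // 3
Console.WriteLine(frame.ToString());    // *1*1*12##

// Frames compare by value, so a received frame can be tested against the predefined constants:
var acknowledged = frame == OpenNettyFrames.Acknowledgement; // false for the frame above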
+ /// + public static readonly OpenNettyFrame NegativeAcknowledgement = OpenNettyFrame.Parse("*#*0##"); +} diff --git a/src/OpenNetty/OpenNettyGateway.cs b/src/OpenNetty/OpenNettyGateway.cs new file mode 100644 index 0000000..526f50d --- /dev/null +++ b/src/OpenNetty/OpenNettyGateway.cs @@ -0,0 +1,230 @@ +using System.IO.Ports; +using System.Net; + +namespace OpenNetty; + +/// +/// Represents an OpenNetty gateway. +/// +public sealed class OpenNettyGateway +{ + /// + /// Gets or sets the type of connection used to communicate with the gateway. + /// + public required OpenNettyConnectionType ConnectionType { get; init; } + + /// + /// Gets or sets the device associated with the gateway. + /// + public required OpenNettyDevice Device { get; init; } + + /// + /// Gets or sets the password associated with the gateway, if applicable (SCS only). + /// + public string? Password { get; init; } + + /// + /// Gets the protocol implemented by the gateway. + /// + public OpenNettyProtocol Protocol => Device.Definition.Protocol; + + /// + /// Gets or sets the serial port associated with the gateway, if applicable. + /// + public SerialPort? SerialPort { get; init; } + + /// + /// Gets or sets the unique name associated with the gateway. + /// + public required string Name { get; init; } + + /// + /// Gets or sets the IP endpoint associated with the gateway, if applicable. + /// + public IPEndPoint? IPEndpoint { get; init; } + + /// + /// Gets or sets the options associated with the gateway. + /// + public required OpenNettyGatewayOptions Options { get; init; } + + /// + public bool Equals(OpenNettyGateway? other) + { + if (ReferenceEquals(this, other)) + { + return true; + } + + return other is not null && + ConnectionType == other.ConnectionType && + Device == other.Device && + IPEndpoint == other.IPEndpoint && + string.Equals(Name, other.Name, StringComparison.OrdinalIgnoreCase) && + string.Equals(Password, other.Password, StringComparison.Ordinal) && + Protocol == other.Protocol && + string.Equals(SerialPort?.PortName, other.SerialPort?.PortName, StringComparison.OrdinalIgnoreCase); + } + + /// + public override bool Equals(object? obj) => obj is OpenNettyGateway gateway && Equals(gateway); + + /// + public override int GetHashCode() + { + var hash = new HashCode(); + hash.Add(ConnectionType); + hash.Add(Device); + hash.Add(IPEndpoint); + hash.Add(Name); + hash.Add(Password); + hash.Add(Protocol); + hash.Add(SerialPort?.PortName); + + return hash.ToHashCode(); + } + + /// + /// Computes the representation of the current gateway. + /// + /// The representation of the current gateway. + public override string ToString() => Name; + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettyGateway? left, OpenNettyGateway? right) + => ReferenceEquals(left, right) || (left is not null && right is not null && left.Equals(right)); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. + public static bool operator !=(OpenNettyGateway? left, OpenNettyGateway? right) => !(left == right); + + /// + /// Creates a new instance of the + /// class using the specified Internet Protocol endpoint. + /// + /// The gateway name. + /// The gateway device. + /// The Internet Protocol endpoint. + /// The authentication password, if applicable. 
+ /// The gateway options. + /// A new instance of the class. + public static OpenNettyGateway Create( + string name, + OpenNettyDevice device, + IPEndPoint endpoint, + string? password = null, + OpenNettyGatewayOptions? options = null) + { + ArgumentNullException.ThrowIfNull(device); + ArgumentNullException.ThrowIfNull(endpoint); + ArgumentException.ThrowIfNullOrEmpty(name); + + return new OpenNettyGateway + { + ConnectionType = OpenNettyConnectionType.Tcp, + Device = device, + IPEndpoint = endpoint, + Name = name, + Options = options ?? OpenNettyGatewayOptions.CreateDefaults(device), + Password = password + }; + } + + /// + /// Creates a new instance of the class using the specified serial port. + /// + /// The gateway name. + /// The gateway device. + /// The serial port. + /// The gateway options. + /// A new instance of the class. + public static OpenNettyGateway Create( + string name, + OpenNettyDevice device, + SerialPort port, + OpenNettyGatewayOptions? options = null) + { + ArgumentNullException.ThrowIfNull(device); + ArgumentNullException.ThrowIfNull(port); + ArgumentException.ThrowIfNullOrEmpty(name); + + return new OpenNettyGateway + { + ConnectionType = OpenNettyConnectionType.Serial, + Device = device, + Name = name, + Options = options ?? OpenNettyGatewayOptions.CreateDefaults(device), + SerialPort = port + }; + } + + /// + /// Creates a new instance of the + /// class using the specified Internet Protocol endpoint. + /// + /// The gateway name. + /// The gateway brand. + /// The gateway model. + /// The Internet Protocol endpoint. + /// The authentication password, if applicable. + /// The gateway options. + /// A new instance of the class. + public static OpenNettyGateway Create( + string name, + OpenNettyBrand brand, + string model, + IPEndPoint endpoint, + string? password = null, + OpenNettyGatewayOptions? options = null) + { + ArgumentNullException.ThrowIfNull(endpoint); + ArgumentException.ThrowIfNullOrEmpty(name); + ArgumentException.ThrowIfNullOrEmpty(model); + + var device = new OpenNettyDevice + { + Definition = OpenNettyDevices.GetDeviceByModel(brand, model) ?? + throw new InvalidOperationException(SR.FormatID0098(brand, model)) + }; + + return Create(name, device, endpoint, password, options); + } + + /// + /// Creates a new instance of the class using the specified serial port. + /// + /// The gateway name. + /// The gateway brand. + /// The gateway model. + /// The serial port. + /// The gateway options. + /// A new instance of the class. + public static OpenNettyGateway Create( + string name, + OpenNettyBrand brand, + string model, + SerialPort port, + OpenNettyGatewayOptions? options = null) + { + ArgumentNullException.ThrowIfNull(port); + ArgumentException.ThrowIfNullOrEmpty(name); + ArgumentException.ThrowIfNullOrEmpty(model); + + var device = new OpenNettyDevice + { + Definition = OpenNettyDevices.GetDeviceByModel(brand, model) ?? + throw new InvalidOperationException(SR.FormatID0098(brand, model)) + }; + + return Create(name, device, port, options); + } +} diff --git a/src/OpenNetty/OpenNettyGatewayOptions.cs b/src/OpenNetty/OpenNettyGatewayOptions.cs new file mode 100644 index 0000000..d23e832 --- /dev/null +++ b/src/OpenNetty/OpenNettyGatewayOptions.cs @@ -0,0 +1,267 @@ +using Polly; +using Polly.Retry; + +namespace OpenNetty; + +/// +/// Provides various settings used to communicate with an OpenNetty gateway. +/// +public sealed record OpenNettyGatewayOptions +{ + /// + /// Gets or sets the action validation timeout (Nitoo only). 
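// A sketch of the brand/model-based Create() overloads shown above. The OpenNettyBrand member
// names, model references, IP address and serial-port path are illustrative assumptions; only
// models known to OpenNettyDevices.GetDeviceByModel() are accepted at runtime.
using System.IO.Ports;
using System.Net;

var scsGateway = OpenNettyGateway.Create(
    name: "scs-gateway",
    brand: OpenNettyBrand.BTicino,  // assumed enum member name
    model: "F454",                  // illustrative model reference
    endpoint: new IPEndPoint(IPAddress.Parse("192.168.1.30"), 20000),
    password: "12345");

var serialGateway = OpenNettyGateway.Create(
    name: "nitoo-gateway",
    brand: OpenNettyBrand.Legrand,  // assumed enum member name
    model: "88213",                 // illustrative model reference
    port: new SerialPort("/dev/ttyUSB0"));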
+ /// + public required TimeSpan ActionValidationTimeout { get; init; } + + /// + /// Gets or sets the maximum lifetime of command sessions. + /// + public required TimeSpan CommandSessionMaximumLifetime { get; init; } + + /// + /// Gets or sets the connection negotiation timeout. + /// + public required TimeSpan ConnectionNegotiationTimeout { get; init; } + + /// + /// Gets or sets a boolean indicating whether the supervision mode should be enabled (Zigbee only). + /// + public required bool EnableSupervisionMode { get; init; } + + /// + /// Gets or sets the frame acknowledgement timeout. + /// + public required TimeSpan FrameAcknowledgementTimeout { get; init; } + + /// + /// Gets or sets the maximum number of concurrent command sessions allowed. + /// + public required ushort MaximumConcurrentCommandSessions { get; init; } + + /// + /// Gets or sets the reply timeout used when multiple dimensions should be returned. + /// + public required TimeSpan MultipleDimensionReplyTimeout { get; init; } + + /// + /// Gets or sets the reply timeout used when multiple status replies should be returned. + /// + public required TimeSpan MultipleStatusReplyTimeout { get; init; } + + /// + /// Gets or sets the outgoing message processing timeout. + /// + public required TimeSpan OutgoingMessageProcessingTimeout { get; init; } + + /// + /// Gets or sets the post-sending delay, if applicable. + /// + public required TimeSpan PostSendingDelay { get; init; } + + /// + /// Gets or sets the reply timeout used when a single dimension should be returned. + /// + public required TimeSpan UniqueDimensionReplyTimeout { get; init; } + + /// + /// Gets or sets the reply timeout used when a unique status reply should be returned. + /// + public required TimeSpan UniqueStatusReplyTimeout { get; init; } + + /// + /// Gets or sets the used to manage sessions. + /// + public required ResiliencePipeline SessionResiliencePipeline { get; init; } + + /// + /// Gets or sets the used to send an outgoing message. + /// + public required ResiliencePipeline OutgoingMessageResiliencePipeline { get; init; } + + /// + /// Creates a default instance of the + /// class with default options appropriate for the specified device. + /// + /// The device. + /// A default instance of the class. + public static OpenNettyGatewayOptions CreateDefaults(OpenNettyDevice device) + { + ArgumentNullException.ThrowIfNull(device); + + return new() + { + ActionValidationTimeout = device.Definition.Protocol is OpenNettyProtocol.Nitoo ? TimeSpan.FromSeconds(2) : TimeSpan.Zero, + CommandSessionMaximumLifetime = device.Definition.Protocol is OpenNettyProtocol.Scs ? TimeSpan.FromSeconds(20) : TimeSpan.Zero, + ConnectionNegotiationTimeout = TimeSpan.FromSeconds(10), + EnableSupervisionMode = device.Definition.HasCapability(OpenNettyCapabilities.ZigbeeSupervision), + FrameAcknowledgementTimeout = TimeSpan.FromSeconds(5), + MaximumConcurrentCommandSessions = device.Definition.Protocol is OpenNettyProtocol.Scs ? (ushort) 3 : (ushort) 0, + MultipleDimensionReplyTimeout = device.Definition.Protocol is OpenNettyProtocol.Scs or OpenNettyProtocol.Zigbee ? TimeSpan.FromSeconds(10) : TimeSpan.Zero, + MultipleStatusReplyTimeout = device.Definition.Protocol is OpenNettyProtocol.Scs or OpenNettyProtocol.Zigbee ? TimeSpan.FromSeconds(10) : TimeSpan.Zero, + OutgoingMessageProcessingTimeout = TimeSpan.FromSeconds(10), + PostSendingDelay = device.Definition.Protocol is OpenNettyProtocol.Nitoo ? 
TimeSpan.FromMilliseconds(150) : TimeSpan.Zero, + UniqueDimensionReplyTimeout = TimeSpan.FromSeconds(2), + UniqueStatusReplyTimeout = TimeSpan.FromSeconds(2), + + OutgoingMessageResiliencePipeline = new ResiliencePipelineBuilder().AddRetry(new RetryStrategyOptions + { + DelayGenerator = static arguments => + { + if (!arguments.Context.Properties.TryGetValue( + key : new ResiliencePropertyKey(nameof(OpenNettyTransmissionOptions)), + value: out OpenNettyTransmissionOptions options)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0074)); + } + + // If the post-sending delay was disabled for this transmission, + // use longer pause times when retrying to send a message. + if (options.HasFlag(OpenNettyTransmissionOptions.DisablePostSendingDelay)) + { + return new(arguments.AttemptNumber switch + { + 0 => TimeSpan.FromMilliseconds(200), + 1 => TimeSpan.FromMilliseconds(500), + _ => TimeSpan.FromMilliseconds(1_000) + }); + } + + return new(arguments.AttemptNumber switch + { + 0 => TimeSpan.FromMilliseconds(100), + 1 => TimeSpan.FromMilliseconds(300), + _ => TimeSpan.FromMilliseconds(800) + }); + }, + // Note: this setting is deliberately set to the maximum value allowed + // to be able to define it dynamically in the ShouldHandle delegate. + MaxRetryAttempts = int.MaxValue, + ShouldHandle = static arguments => + { + if (!arguments.Context.Properties.TryGetValue( + key: new ResiliencePropertyKey(nameof(OpenNettyGateway)), + value: out OpenNettyGateway? gateway)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0074)); + } + + if (!arguments.Context.Properties.TryGetValue( + key: new ResiliencePropertyKey>(nameof(OpenNettyLogger)), + value: out OpenNettyLogger? logger)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0074)); + } + + if (!arguments.Context.Properties.TryGetValue( + key : new ResiliencePropertyKey(nameof(OpenNettyMessage)), + value: out OpenNettyMessage? message)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0074)); + } + + if (!arguments.Context.Properties.TryGetValue( + key : new ResiliencePropertyKey(nameof(OpenNettyTransmissionOptions)), + value: out OpenNettyTransmissionOptions options)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0074)); + } + + // Never retransmit a message if no exception was thrown. + if (arguments.Outcome.Exception is null) + { + return ValueTask.FromResult(false); + } + + logger.MessageErrored(arguments.Outcome.Exception, message, gateway); + + return ValueTask.FromResult(arguments.Outcome.Exception switch + { + // Nitoo gateways are known for returning NACK frames when sending multiple messages + // in a row. In this case, always retry sending the message 3 times before giving up. + OpenNettyException { ErrorCode: OpenNettyErrorCode.InvalidFrame } + when message.Protocol is OpenNettyProtocol.Nitoo => arguments.AttemptNumber is < 3, + + // In large Zigbee networks, Zigbee gateways can sometimes return BUSY NACK frames + // when the network is overloaded (e.g when sending multiple broadcast frames). + // In this case, always retry sending the message twice before giving up. + OpenNettyException { ErrorCode: OpenNettyErrorCode.InvalidFrame or OpenNettyErrorCode.GatewayBusy } + when message.Protocol is OpenNettyProtocol.Zigbee => arguments.AttemptNumber is < 2, + + // SCS gateways are less easily overloaded. As such, retry sending the message only once before giving up. 
+ OpenNettyException { ErrorCode: OpenNettyErrorCode.InvalidFrame } + when message.Protocol is OpenNettyProtocol.Scs => arguments.AttemptNumber is < 1, + + // For messages sent via powerline or radio (that are prone to interference), always retry + // twice if the error was caused by a missing reply from the end device, unless the sender + // explicitly specified that unsafe retransmissions are not allowed for this message. + OpenNettyException { ErrorCode: OpenNettyErrorCode.NoActionReceived or + OpenNettyErrorCode.NoDimensionReceived or + OpenNettyErrorCode.NoStatusReceived } + when message.Media is OpenNettyMedia.Powerline or OpenNettyMedia.Radio + => arguments.AttemptNumber is < 2 && !options.HasFlag(OpenNettyTransmissionOptions.DisallowRetransmissions), + + // For messages sent via a dedicated bus, retry only once if the error was caused + // by a missing reply from the end device, unless the sender explicitly specified + // that unsafe retransmissions are not allowed for this message. + OpenNettyException { ErrorCode: OpenNettyErrorCode.InvalidFrame or OpenNettyErrorCode.GatewayBusy } + when message.Media is OpenNettyMedia.Bus + => arguments.AttemptNumber is < 1 && !options.HasFlag(OpenNettyTransmissionOptions.DisallowRetransmissions), + + _ => false + }); + }, + OnRetry = static arguments => + { + if (!arguments.Context.Properties.TryGetValue( + key : new ResiliencePropertyKey(nameof(OpenNettyGateway)), + value: out OpenNettyGateway? gateway)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0074)); + } + + if (!arguments.Context.Properties.TryGetValue( + key : new ResiliencePropertyKey>(nameof(OpenNettyLogger)), + value: out OpenNettyLogger? logger)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0074)); + } + + if (!arguments.Context.Properties.TryGetValue( + key : new ResiliencePropertyKey(nameof(OpenNettyMessage)), + value: out OpenNettyMessage? message)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0074)); + } + + logger.MessageRetransmitted(message, gateway, (uint) arguments.AttemptNumber + 1); + + return ValueTask.CompletedTask; + } + }).Build(), + + SessionResiliencePipeline = new ResiliencePipelineBuilder() + .AddRetry(new RetryStrategyOptions + { + MaxRetryAttempts = int.MaxValue, + ShouldHandle = static arguments => ValueTask.FromResult( + !arguments.Context.CancellationToken.IsCancellationRequested) + }) + .AddRetry(new RetryStrategyOptions + { + DelayGenerator = static arguments => new(arguments.AttemptNumber switch + { + 0 or 1 => TimeSpan.FromSeconds(1), + 2 or 3 => TimeSpan.FromSeconds(5), + 4 or 5 => TimeSpan.FromSeconds(10), + 6 or 7 or 8 or 9 => TimeSpan.FromSeconds(30), + _ => TimeSpan.FromSeconds(60) + }), + MaxRetryAttempts = int.MaxValue, + ShouldHandle = static arguments => ValueTask.FromResult( + !arguments.Context.CancellationToken.IsCancellationRequested && + arguments.Outcome.Exception is not null) + }) + .Build() + }; + } +} diff --git a/src/OpenNetty/OpenNettyHelpers.cs b/src/OpenNetty/OpenNettyHelpers.cs new file mode 100644 index 0000000..1aaf50c --- /dev/null +++ b/src/OpenNetty/OpenNettyHelpers.cs @@ -0,0 +1,317 @@ +using System.Collections.Concurrent; + +namespace OpenNetty; + +/// +/// Exposes common helpers used by the OpenNetty assemblies. +/// +internal static class OpenNettyHelpers +{ + /// + /// Converts an async-observable sequence to an async-enumerable sequence. + /// + /// The type of the elements in the source sequence. 
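// CreateDefaults() above derives protocol-appropriate timeouts and resilience pipelines from the
// device definition. Because OpenNettyGatewayOptions is a record, individual settings can be
// overridden non-destructively with a 'with' expression; 'device' is assumed to be an existing
// OpenNettyDevice instance.
var options = OpenNettyGatewayOptions.CreateDefaults(device) with
{
    ConnectionNegotiationTimeout = TimeSpan.FromSeconds(30),
    FrameAcknowledgementTimeout = TimeSpan.FromSeconds(10)
};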
+ /// Async-observable sequence to convert to an async-enumerable sequence. + /// The async-enumerable sequence whose elements are pulled from the given async-observable sequence. + /// is null. + public static IAsyncEnumerable ToAsyncEnumerable(this IAsyncObservable source) + { + ArgumentNullException.ThrowIfNull(source); + + return new AsyncObservableAsyncEnumerable(source); + } + + // REVIEW: The base class below was introduced to avoid the overhead of storing a field of type TSource if the + // value of the iterator can trivially be inferred from another field (e.g. in Repeat). It is also used + // by the Defer operator in System.Interactive.Async. For some operators such as Where, Skip, Take, and + // Concat, it could be used to retrieve the value from the underlying enumerator. However, performance + // of this approach is a bit worse in some cases, so we don't go ahead with it for now. One decision to + // make is whether it's okay for Current to throw an exception when MoveNextAsync returns false, e.g. + // by omitting a null check for an enumerator field. + + internal abstract partial class AsyncIteratorBase : IAsyncEnumerable, IAsyncEnumerator + { + private readonly int _threadId; + + protected AsyncIteratorState _state = AsyncIteratorState.New; + protected CancellationToken _cancellationToken; + + protected AsyncIteratorBase() + { + _threadId = Environment.CurrentManagedThreadId; + } + + public IAsyncEnumerator GetAsyncEnumerator(CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); // NB: [LDM-2018-11-28] Equivalent to async iterator behavior. + + var enumerator = _state == AsyncIteratorState.New && _threadId == Environment.CurrentManagedThreadId + ? this + : Clone(); + + enumerator._state = AsyncIteratorState.Allocated; + enumerator._cancellationToken = cancellationToken; + + // REVIEW: If the final interface contains a CancellationToken here, should we check for a cancellation request + // either here or in the first call to MoveNextAsync? + + return enumerator; + } + + public virtual ValueTask DisposeAsync() + { + _state = AsyncIteratorState.Disposed; + + return default; + } + + public abstract TSource Current { get; } + + public async ValueTask MoveNextAsync() + { + // Note: MoveNext *must* be implemented as an async method to ensure + // that any exceptions thrown from the MoveNextCore call are handled + // by the try/catch, whether they're sync or async + + if (_state == AsyncIteratorState.Disposed) + { + return false; + } + + try + { + return await MoveNextCore().ConfigureAwait(false); + } + catch + { + await DisposeAsync().ConfigureAwait(false); + throw; + } + } + + public abstract AsyncIteratorBase Clone(); + + protected abstract ValueTask MoveNextCore(); + } + + internal abstract class AsyncIterator : AsyncIteratorBase + { + protected TSource _current = default!; + + public override TSource Current => _current; + + public override ValueTask DisposeAsync() + { + _current = default!; + + return base.DisposeAsync(); + } + } + + internal enum AsyncIteratorState + { + New = 0, + Allocated = 1, + Iterating = 2, + Disposed = -1, + } + + private sealed class AsyncObservableAsyncEnumerable : AsyncIterator, IAsyncObserver + { + private readonly IAsyncObservable _source; + + private ConcurrentQueue? _values = new(); + private Exception? _error; + private bool _completed; + private TaskCompletionSource? _signal; + private IAsyncDisposable? 
_subscription; + private CancellationTokenRegistration _ctr; + + public AsyncObservableAsyncEnumerable(IAsyncObservable source) => _source = source; + + public override AsyncIteratorBase Clone() => new AsyncObservableAsyncEnumerable(_source); + + protected override async ValueTask MoveNextCore() + { + // + // REVIEW: How often should we check? At the very least, we want to prevent + // subscribing if cancellation is requested. A case may be made to + // check for each iteration, namely because this operator is a bridge + // with another interface. However, we also wire up cancellation to + // the observable subscription, so there's redundancy here. + // + _cancellationToken.ThrowIfCancellationRequested(); + + switch (_state) + { + case AsyncIteratorState.Allocated: + // + // NB: Breaking change to align with lazy nature of async iterators. + // + // In previous implementations, the Subscribe call happened during + // the call to GetAsyncEnumerator. + // + // REVIEW: Confirm this design point. This implementation is compatible + // with an async iterator using "yield return", e.g. subscribing + // to the observable sequence and yielding values out of a local + // queue filled by observer callbacks. However, it departs from + // the dual treatment of Subscribe/GetEnumerator. + // + + _subscription = await _source.SubscribeAsync(this); + _ctr = _cancellationToken.Register(async () => await OnCanceledAsync()); + _state = AsyncIteratorState.Iterating; + goto case AsyncIteratorState.Iterating; + + case AsyncIteratorState.Iterating: + while (true) + { + var completed = Volatile.Read(ref _completed); + + if (_values!.TryDequeue(out _current!)) + { + return true; + } + else if (completed) + { + var error = _error; + + if (error != null) + { + throw error; + } + + return false; + } + + await Resume().ConfigureAwait(false); + Volatile.Write(ref _signal, null); + } + } + + await DisposeAsync().ConfigureAwait(false); + return false; + } + + public async ValueTask OnCompletedAsync() + { + Volatile.Write(ref _completed, true); + + await DisposeSubscriptionAsync(); + OnNotification(); + } + + public async ValueTask OnErrorAsync(Exception error) + { + _error = error; + Volatile.Write(ref _completed, true); + + await DisposeSubscriptionAsync(); + OnNotification(); + } + + public ValueTask OnNextAsync(TSource value) + { + _values?.Enqueue(value); + + OnNotification(); + + return default; + } + + private void OnNotification() + { + while (true) + { + var signal = Volatile.Read(ref _signal); + + if (signal == TaskExt.True) + { + return; + } + + if (signal != null) + { + signal.TrySetResult(true); + return; + } + + if (Interlocked.CompareExchange(ref _signal, TaskExt.True, null) == null) + { + return; + } + } + } + + public override async ValueTask DisposeAsync() + { + await _ctr.DisposeAsync(); + await DisposeSubscriptionAsync(); + + _values = null; + _error = null; + } + + private ValueTask DisposeSubscriptionAsync() => Interlocked.Exchange(ref _subscription, null)?.DisposeAsync() ?? 
default; + + private async ValueTask OnCanceledAsync() + { + var cancelledTcs = default(TaskCompletionSource); + + await DisposeAsync(); + + while (true) + { + var signal = Volatile.Read(ref _signal); + + if (signal != null) + { + if (signal.TrySetCanceled(_cancellationToken)) + return; + } + + if (cancelledTcs == null) + { + cancelledTcs = new TaskCompletionSource(); + cancelledTcs.TrySetCanceled(_cancellationToken); + } + + if (Interlocked.CompareExchange(ref _signal, cancelledTcs, signal) == signal) + return; + } + } + + private Task Resume() + { + TaskCompletionSource? newSignal = null; + + while (true) + { + var signal = Volatile.Read(ref _signal); + + if (signal != null) + { + return signal.Task; + } + + newSignal ??= new TaskCompletionSource(); + + if (Interlocked.CompareExchange(ref _signal, newSignal, null) == null) + { + return newSignal.Task; + } + } + } + } + + internal static class TaskExt + { + public static readonly TaskCompletionSource True; + + static TaskExt() + { + True = new TaskCompletionSource(); + True.SetResult(true); + } + } +} diff --git a/src/OpenNetty/OpenNettyHostedService.cs b/src/OpenNetty/OpenNettyHostedService.cs new file mode 100644 index 0000000..6e874b2 --- /dev/null +++ b/src/OpenNetty/OpenNettyHostedService.cs @@ -0,0 +1,127 @@ +using System.ComponentModel; +using System.Reactive.Disposables; +using System.Reactive.Linq; +using System.Threading.Channels; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Options; + +namespace OpenNetty; + +/// +/// Contains the logic necessary to connect the event pipeline when the application is starting up. +/// +[EditorBrowsable(EditorBrowsableState.Never)] +public class OpenNettyHostedService : BackgroundService +{ + private readonly OpenNettyEvents _events; + private readonly IEnumerable _handlers; + private readonly OpenNettyLogger _logger; + private readonly IOptionsMonitor _options; + private readonly IOpenNettyPipeline _pipeline; + private readonly IOpenNettyWorker _worker; + + /// + /// Creates a new instance of the class. + /// + /// The OpenNetty events. + /// The OpenNetty handlers. + /// The OpenNetty logger. + /// The OpenNetty options. + /// The OpenNetty pipeline. + /// The OpenNetty worker. + public OpenNettyHostedService( + OpenNettyEvents events, + IEnumerable handlers, + OpenNettyLogger logger, + IOptionsMonitor options, + IOpenNettyPipeline pipeline, + IOpenNettyWorker worker) + { + _events = events ?? throw new ArgumentNullException(nameof(events)); + _handlers = handlers ?? throw new ArgumentNullException(nameof(handlers)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); + _worker = worker ?? throw new ArgumentNullException(nameof(worker)); + } + + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + _logger.HostedServiceStarting(); + + await using var subscriptions = new CompositeAsyncDisposable(); + + List tasks = []; + + try + { + // Always invoke the handlers registered in the dependency injection container before notifications + // start being processed to avoid race conditions and ensure no notification will be missed. + foreach (var handler in _handlers) + { + await subscriptions.AddAsync(await handler.SubscribeAsync()); + } + + foreach (var gateway in _options.CurrentValue.Gateways) + { + // Create the unbounded channel that will be used to push notifications to the worker. 
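// The internal ToAsyncEnumerable() helper above bridges the IAsyncObservable<T> abstraction
// (assumed to come from the experimental AsyncRx.NET packages) to IAsyncEnumerable<T>, so library
// code can consume an observable with 'await foreach'. 'observable' is assumed to be an
// IAsyncObservable<OpenNettyNotification> available in scope.
await foreach (var notification in observable.ToAsyncEnumerable())
{
    // Process each notification as it is pushed by the underlying subscription.
}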
+ var output = Channel.CreateUnbounded(new UnboundedChannelOptions + { + AllowSynchronousContinuations = false, + SingleReader = false, + SingleWriter = true + }); + + // Create the unbounded channel that will be used to receive notifications from the worker. + var input = Channel.CreateUnbounded(new UnboundedChannelOptions + { + AllowSynchronousContinuations = false, + SingleReader = true, + SingleWriter = false + }); + + // Monitor all the notifications that should be handled by the worker and copy them to the output channel. + await subscriptions.AddAsync(await _pipeline + .Where(notification => notification.Gateway == gateway) + .Do(notification => output.Writer.WriteAsync(notification)) + .Retry() + .SubscribeAsync(static notification => ValueTask.CompletedTask)); + + // Monitor all the notifications pushed by the worker and dispatch them using the events pipeline. + tasks.Add(Task.Run(async () => + { + while (await input.Reader.WaitToReadAsync(stoppingToken)) + { + while (input.Reader.TryRead(out OpenNettyNotification? notification)) + { + await _pipeline.PublishAsync(notification, stoppingToken); + } + } + }, stoppingToken)); + + // Ask the worker to process incoming and outgoing notifications for this gateway. + tasks.Add(_worker.ProcessNotificationsAsync(gateway, output.Reader, input.Writer, stoppingToken)); + } + + // Connect the observable instances to allow observers to start processing notifications. + await subscriptions.AddAsync(await _events.ConnectAsync()); + await subscriptions.AddAsync(await _pipeline.ConnectAsync()); + + _logger.HostedServiceStarted(); + + await Task.WhenAll(tasks); + } + + catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) + { + _logger.HostedServiceStopped(); + } + + catch (Exception exception) + { + _logger.HostedServiceFailed(exception); + + throw; + } + } +} diff --git a/src/OpenNetty/OpenNettyIdentity.cs b/src/OpenNetty/OpenNettyIdentity.cs new file mode 100644 index 0000000..0f00be1 --- /dev/null +++ b/src/OpenNetty/OpenNettyIdentity.cs @@ -0,0 +1,50 @@ +namespace OpenNetty; + +/// +/// Represents an OpenNetty identity that uniquely +/// identifies a specific Legrand/BTicino product. +/// +public readonly struct OpenNettyIdentity : IEquatable +{ + /// + /// Gets or sets the brand. + /// + public required OpenNettyBrand Brand { get; init; } + + /// + /// Gets or sets the model. + /// + public required string Model { get; init; } + + /// + public bool Equals(OpenNettyIdentity other) => Brand == other.Brand && + string.Equals(Model, other.Model, StringComparison.OrdinalIgnoreCase); + + /// + public override bool Equals(object? obj) => obj is OpenNettyIdentity identity && Equals(identity); + + /// + public override int GetHashCode() => HashCode.Combine(Brand, Model); + + /// + /// Computes the representation of the current identity. + /// + /// The representation of the current identity. + public override string ToString() => $"{Enum.GetName(Brand)} {Model}"; + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettyIdentity left, OpenNettyIdentity right) => left.Equals(right); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. 
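// Identities compare by brand plus case-insensitive model, as implemented in Equals() above.
// The brand member name and model reference below are illustrative assumptions.
var first  = new OpenNettyIdentity { Brand = OpenNettyBrand.Legrand, Model = "067204" };
var second = new OpenNettyIdentity { Brand = OpenNettyBrand.Legrand, Model = "067204" };
Console.WriteLine(first == second);  // True
Console.WriteLine(first.ToString()); // "Legrand 067204", assuming that enum member name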
+ public static bool operator !=(OpenNettyIdentity left, OpenNettyIdentity right) => !(left == right); +} diff --git a/src/OpenNetty/OpenNettyLogger.cs b/src/OpenNetty/OpenNettyLogger.cs new file mode 100644 index 0000000..8d3a002 --- /dev/null +++ b/src/OpenNetty/OpenNettyLogger.cs @@ -0,0 +1,229 @@ +using System.ComponentModel; +using Microsoft.Extensions.Logging; + +namespace OpenNetty; + +/// +/// Contains methods used to log strongly-typed messages. +/// +/// The generic typed used to infer a category name. +[EditorBrowsable(EditorBrowsableState.Never)] +public partial class OpenNettyLogger +{ + private readonly ILogger _logger; + + /// + /// Creates a new instance of the class. + /// + /// The logger. + public OpenNettyLogger(ILogger logger) + => _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + + /// + /// Logs a message indicating that the hosted service is starting. + /// + [LoggerMessage( + EventId = 6000, + Level = LogLevel.Information, + Message = "The OpenNetty hosted service is starting.")] + public partial void HostedServiceStarting(); + + /// + /// Logs a message indicating that the hosted service has started. + /// + [LoggerMessage( + EventId = 6001, + Level = LogLevel.Information, + Message = "The OpenNetty hosted service has successfully started.")] + public partial void HostedServiceStarted(); + + /// + /// Logs a message indicating that the hosted service has stopped. + /// + [LoggerMessage( + EventId = 6002, + Level = LogLevel.Information, + Message = "The OpenNetty hosted service has successfully stopped.")] + public partial void HostedServiceStopped(); + + /// + /// Logs a message indicating that an exception error occurred while starting the hosted service. + /// + /// The exception. + [LoggerMessage( + EventId = 6003, + Level = LogLevel.Critical, + Message = "The OpenNetty hosted service has failed due to an unexpected exception.")] + public partial void HostedServiceFailed(Exception exception); + + /// + /// Logs a message indicating that a worker is starting. + /// + /// The gateway. + [LoggerMessage( + EventId = 6004, + Level = LogLevel.Information, + Message = "The worker associated to the gateway {Gateway} is starting.")] + public partial void WorkerStarting(OpenNettyGateway gateway); + + /// + /// Logs a message indicating that a worker has started. + /// + /// The gateway. + [LoggerMessage( + EventId = 6005, + Level = LogLevel.Information, + Message = "The worker associated to the gateway {Gateway} has successfully started.")] + public partial void WorkerStarted(OpenNettyGateway gateway); + + /// + /// Logs a message indicating that a long-lived task runner was scheduled. + /// + /// The gateway. + /// The session type. + [LoggerMessage( + EventId = 6006, + Level = LogLevel.Information, + Message = "A task runner was successfully scheduled for gateway {Gateway} (session type: {Type}).")] + public partial void TaskRunnerScheduled(OpenNettyGateway gateway, OpenNettySessionType type); + + /// + /// Logs a message indicating that a new session was open. + /// + /// The gateway. + /// The session type. + /// The session. + [LoggerMessage( + EventId = 6007, + Level = LogLevel.Debug, + Message = "A new session of type {Type} was open to gateway {Gateway}: {Session}.")] + public partial void SessionOpen(OpenNettyGateway gateway, OpenNettySessionType type, OpenNettySession session); + + /// + /// Logs a message indicating that a session was closed. + /// + /// The session. 
+ [LoggerMessage( + EventId = 6008, + Level = LogLevel.Debug, + Message = "The session {Session} was closed.")] + public partial void SessionClosed(OpenNettySession session); + + /// + /// Logs a message indicating that an incoming message was received. + /// + /// The message. + /// The gateway. + /// The session. + [LoggerMessage( + EventId = 6009, + Level = LogLevel.Debug, + Message = "An incoming message was received by gateway {Gateway} using session {Session}: {Message}.")] + public partial void MessageReceived(OpenNettyMessage message, OpenNettyGateway gateway, OpenNettySession session); + + /// + /// Logs a message indicating that an outgoing message was sent. + /// + /// The message. + /// The gateway. + /// The session. + [LoggerMessage( + EventId = 6010, + Level = LogLevel.Debug, + Message = "An outgoing message was successfully sent by gateway {Gateway} using session {Session}: {Message}.")] + public partial void MessageSent(OpenNettyMessage message, OpenNettyGateway gateway, OpenNettySession session); + + /// + /// Logs a message indicating that an outgoing message couldn't be sent because the gateway was too busy. + /// + /// The message. + /// The gateway. + /// The session. + [LoggerMessage( + EventId = 6011, + Level = LogLevel.Information, + Message = "An outgoing message couldn't be sent by gateway {Gateway} using session {Session} because the gateway was too busy: {Message}.")] + public partial void GatewayBusy(OpenNettyMessage message, OpenNettyGateway gateway, OpenNettySession session); + + /// + /// Logs a message indicating that an outgoing message was rejected by the end device. + /// + /// The message. + /// The gateway. + /// The session. + [LoggerMessage( + EventId = 6012, + Level = LogLevel.Information, + Message = "An outgoing message was sent by gateway {Gateway} using session {Session} but was rejected by the end device: {Message}.")] + public partial void InvalidAction(OpenNettyMessage message, OpenNettyGateway gateway, OpenNettySession session); + + /// + /// Logs a message indicating that an outgoing message couldn't be sent because no action validation frame was received. + /// + /// The message. + /// The gateway. + /// The session. + [LoggerMessage( + EventId = 6013, + Level = LogLevel.Information, + Message = "An outgoing message was sent by gateway {Gateway} using session {Session} but was not acknowledged by the end device: {Message}.")] + public partial void NoActionReceived(OpenNettyMessage message, OpenNettyGateway gateway, OpenNettySession session); + + /// + /// Logs a message indicating that an outgoing message couldn't be sent because no acknowledgment frame was received. + /// + /// The message. + /// The gateway. + /// The session. + [LoggerMessage( + EventId = 6014, + Level = LogLevel.Information, + Message = "An outgoing message couldn't be sent by gateway {Gateway} using session {Session} as no acknowledgement frame was received: {Message}.")] + public partial void NoAcknowledgementReceived(OpenNettyMessage message, OpenNettyGateway gateway, OpenNettySession session); + + /// + /// Logs a message indicating that an outgoing message was rejected by the gateway. + /// + /// The message. + /// The gateway. + /// The session. 
+ [LoggerMessage( + EventId = 6015, + Level = LogLevel.Information, + Message = "An outgoing message was rejected by gateway {Gateway} using session {Session}: {Message}.")] + public partial void InvalidFrame(OpenNettyMessage message, OpenNettyGateway gateway, OpenNettySession session); + + /// + /// Logs a message indicating that an outgoing message was not successfully sent. + /// + /// The exception. + /// The message. + /// The gateway. + [LoggerMessage( + EventId = 6016, + Level = LogLevel.Information, + Message = "An error occurred while sending a message to gateway {Gateway}: {Message}.")] + public partial void MessageErrored(Exception exception, OpenNettyMessage message, OpenNettyGateway gateway); + + /// + /// Logs a message indicating that an outgoing message will be retransmitted. + /// + /// The message. + /// The gateway. + /// The attempt number. + [LoggerMessage( + EventId = 6017, + Level = LogLevel.Information, + Message = "A message will be retransmitted by gateway {Gateway}: {Message} (attempt number n°{Attempt}).")] + public partial void MessageRetransmitted(OpenNettyMessage message, OpenNettyGateway gateway, uint attempt); + + /// + /// Logs a message indicating that an exception occurred in a Reactive Extensions event handler. + /// + /// The exception. + [LoggerMessage( + EventId = 6018, + Level = LogLevel.Warning, + Message = "An unhandled exception occurred in a Reactive Extensions event handler.")] + public partial void UnhandledEventHandlerException(Exception exception); +} diff --git a/src/OpenNetty/OpenNettyManager.cs b/src/OpenNetty/OpenNettyManager.cs new file mode 100644 index 0000000..605d498 --- /dev/null +++ b/src/OpenNetty/OpenNettyManager.cs @@ -0,0 +1,299 @@ +using System.Runtime.CompilerServices; +using Microsoft.Extensions.Options; + +namespace OpenNetty; + +/// +/// Provides an easy way to resolve endpoints based on their name or address. +/// +public class OpenNettyManager +{ + private readonly IOptionsMonitor _options; + + /// + /// Creates a new instance of the class. + /// + /// The OpenNetty options. + public OpenNettyManager(IOptionsMonitor options) + => _options = options ?? throw new ArgumentNullException(nameof(options)); + + /// + /// Iterates all the endpoints registered in the options. + /// + /// The that can be used to abort the operation. + /// + /// An that can be used to iterate the endpoints registered in the options. + /// + public virtual async IAsyncEnumerable EnumerateEndpointsAsync( + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + await foreach (var endpoint in _options.CurrentValue.Endpoints.ToAsyncEnumerable()) + { + yield return endpoint; + } + } + + /// + /// Iterates all the gateways registered in the options. + /// + /// The that can be used to abort the operation. + /// + /// An that can be used to iterate the gateways registered in the options. + /// + public virtual async IAsyncEnumerable EnumerateGatewaysAsync( + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + await foreach (var gateway in _options.CurrentValue.Gateways.ToAsyncEnumerable()) + { + yield return gateway; + } + } + + /// + /// Resolves an endpoint using the specified name. + /// + /// The endpoint name. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous operation and whose result + /// contains the resolved endpoint, or if no matching endpoint could be resolved. 
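// The logger above is a thin wrapper around ILogger that exposes source-generated, strongly-typed
// log methods. AddOpenNetty() registers it as an open-generic singleton (typeof(OpenNettyLogger<>)),
// so a category-specific instance can be resolved from the container. 'provider' is an assumed
// IServiceProvider and GetRequiredService comes from Microsoft.Extensions.DependencyInjection;
// the generic category parameter shown here is an assumption based on that registration.
var logger = provider.GetRequiredService<OpenNettyLogger<OpenNettyHostedService>>();
logger.HostedServiceStarting();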
+ /// + public virtual ValueTask FindEndpointByNameAsync( + string name, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrEmpty(name); + + OpenNettyEndpoint? endpoint = null; + + for (var index = 0; index < _options.CurrentValue.Endpoints.Count; index++) + { + if (cancellationToken.IsCancellationRequested) + { + return ValueTask.FromCanceled(cancellationToken); + } + + if (string.Equals(_options.CurrentValue.Endpoints[index].Name, name, StringComparison.OrdinalIgnoreCase)) + { + if (endpoint is not null) + { + return ValueTask.FromException(new InvalidOperationException( + "Multiple endpoints matching the specified address exist.")); + } + + endpoint = _options.CurrentValue.Endpoints[index]; + } + } + + return ValueTask.FromResult(endpoint); + } + + /// + /// Resolves an endpoint using the specified address. + /// + /// The endpoint address. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous operation and whose result + /// contains the resolved endpoint, or if no matching endpoint could be resolved. + /// + public virtual ValueTask FindEndpointByAddressAsync( + OpenNettyAddress address, CancellationToken cancellationToken = default) + { + OpenNettyEndpoint? endpoint = null; + + for (var index = 0; index < _options.CurrentValue.Endpoints.Count; index++) + { + if (cancellationToken.IsCancellationRequested) + { + return ValueTask.FromCanceled(cancellationToken); + } + + if (_options.CurrentValue.Endpoints[index].Address == address) + { + if (endpoint is not null) + { + return ValueTask.FromException(new InvalidOperationException( + "Multiple endpoints matching the specified address exist.")); + } + + endpoint = _options.CurrentValue.Endpoints[index]; + } + } + + return ValueTask.FromResult(endpoint); + } + + /// + /// Resolves all the endpoints matching the specified address. + /// + /// The endpoint address. + /// The that can be used to abort the operation. + /// + /// An that can be used to iterate the endpoints associated with the address. 
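// A sketch of resolving a configured endpoint through the manager; 'manager' is assumed to be an
// OpenNettyManager resolved from the container and the endpoint name is illustrative. Names are
// matched case-insensitively and an InvalidOperationException is surfaced when several registered
// endpoints share the same name.
var endpoint = await manager.FindEndpointByNameAsync("living-room-light");
if (endpoint is not null)
{
    Console.WriteLine(endpoint.Name);
}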
+ /// + public virtual async IAsyncEnumerable FindEndpointsByAddressAsync( + OpenNettyAddress address, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + if (address.Type is OpenNettyAddressType.ScsLightPointArea) + { + var (extension, area) = OpenNettyAddress.ToScsLightPointAreaAddress(address); + + await foreach (var endpoint in EnumerateEndpointsAsync(cancellationToken)) + { + if (endpoint.Protocol is not OpenNettyProtocol.Scs || endpoint.Address is null) + { + continue; + } + + if (endpoint.Address == address) + { + yield return endpoint; + } + + switch (endpoint.Address?.Type) + { + case OpenNettyAddressType.ScsLightPointArea when + OpenNettyAddress.ToScsLightPointAreaAddress(endpoint.Address.Value) is var comparand && + comparand.Extension == extension && comparand.Area == area: + yield return endpoint; + break; + + case OpenNettyAddressType.ScsLightPointPointToPoint when + OpenNettyAddress.ToScsLightPointPointToPointAddress(endpoint.Address.Value) is var comparand && + comparand.Extension == extension && comparand.Area == area: + yield return endpoint; + break; + } + } + } + + else if (address.Type is OpenNettyAddressType.ScsLightPointGeneral) + { + var extension = OpenNettyAddress.ToScsLightPointGeneralAddress(address); + + await foreach (var endpoint in EnumerateEndpointsAsync(cancellationToken)) + { + if (endpoint.Protocol is not OpenNettyProtocol.Scs || endpoint.Address is null) + { + continue; + } + + if (endpoint.Address == address) + { + yield return endpoint; + } + + switch (endpoint.Address?.Type) + { + case OpenNettyAddressType.ScsLightPointArea when + OpenNettyAddress.ToScsLightPointAreaAddress(endpoint.Address.Value) is var comparand && + comparand.Extension == extension: + yield return endpoint; + break; + + case OpenNettyAddressType.ScsLightPointGeneral when + OpenNettyAddress.ToScsLightPointGeneralAddress(endpoint.Address.Value) is var comparand && + comparand == extension: + yield return endpoint; + break; + + case OpenNettyAddressType.ScsLightPointPointToPoint when + OpenNettyAddress.ToScsLightPointPointToPointAddress(endpoint.Address.Value) is var comparand && + comparand.Extension == extension: + yield return endpoint; + break; + } + } + } + + else if (address.Type is OpenNettyAddressType.ZigbeeAllDevicesAllUnits or + OpenNettyAddressType.ZigbeeAllDevicesSpecificUnit or + OpenNettyAddressType.ZigbeeSpecificDeviceAllUnits) + { + await foreach (var endpoint in EnumerateEndpointsAsync(cancellationToken)) + { + if (endpoint.Protocol is not OpenNettyProtocol.Zigbee || endpoint.Address is null) + { + continue; + } + + if (endpoint.Address == address) + { + yield return endpoint; + } + + if (MatchesZigbeeAddress(address, endpoint.Address.Value)) + { + yield return endpoint; + } + } + } + + else + { + await foreach (var endpoint in EnumerateEndpointsAsync(cancellationToken)) + { + if (endpoint.Address is not null && endpoint.Address == address) + { + yield return endpoint; + } + } + } + + static bool MatchesZigbeeAddress(OpenNettyAddress left, OpenNettyAddress right) + { + var first = OpenNettyAddress.ToZigbeeAddress(left); + var second = OpenNettyAddress.ToZigbeeAddress(right); + + if (first is { Identifier: null, Unit: not 0 }) + { + return second.Unit == first.Unit; + } + + else if (first is { Identifier: null, Unit: 0 }) + { + return true; + } + + return false; + } + } + + /// + /// Resolves a gateway using the specified name. + /// + /// The gateway name. + /// The that can be used to abort the operation. 
+ /// + /// A that can be used to monitor the asynchronous operation and whose result + /// contains the resolved gateway, or if no matching gateway could be resolved. + /// + public virtual ValueTask FindGatewayByNameAsync( + string name, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrEmpty(name); + + OpenNettyGateway? gateway = null; + + for (var index = 0; index < _options.CurrentValue.Gateways.Count; index++) + { + if (cancellationToken.IsCancellationRequested) + { + return ValueTask.FromCanceled(cancellationToken); + } + + if (string.Equals(_options.CurrentValue.Gateways[index].Name, name, StringComparison.OrdinalIgnoreCase)) + { + if (gateway is not null) + { + return ValueTask.FromException(new InvalidOperationException( + "Multiple gateways matching the specified address exist.")); + } + + gateway = _options.CurrentValue.Gateways[index]; + } + } + + return ValueTask.FromResult(gateway); + } +} diff --git a/src/OpenNetty/OpenNettyMedia.cs b/src/OpenNetty/OpenNettyMedia.cs new file mode 100644 index 0000000..97a0974 --- /dev/null +++ b/src/OpenNetty/OpenNettyMedia.cs @@ -0,0 +1,27 @@ +namespace OpenNetty; + +/// +/// Exposes common OpenNetty transmission media, as defined by the Nitoo and MyHome specifications. +/// +public enum OpenNettyMedia +{ + /// + /// Bus (used in the MyHome Up products). + /// + Bus = 0, + + /// + /// Infrared (used in the In One by Legrand IR and CPL products). + /// + Infrared = 1, + + /// + /// Powerline (used in the In One by Legrand PLC products). + /// + Powerline = 2, + + /// + /// Radio (used in the In One by Legrand RF and MyHome Play products). + /// + Radio = 3 +} diff --git a/src/OpenNetty/OpenNettyMessage.cs b/src/OpenNetty/OpenNettyMessage.cs new file mode 100644 index 0000000..052925d --- /dev/null +++ b/src/OpenNetty/OpenNettyMessage.cs @@ -0,0 +1,749 @@ +using System.Collections.Immutable; +using System.Diagnostics; +using System.Globalization; + +namespace OpenNetty; + +/// +/// Represents a OpenNetty message. +/// +[DebuggerDisplay("{Frame,nq} ({Type,nq})")] +public sealed class OpenNettyMessage : IEquatable +{ + /// + /// Gets the address, if applicable. + /// + public OpenNettyAddress? Address { get; private set; } + + /// + /// Gets the category of the message, if applicable. + /// + public OpenNettyCategory? Category { get; private set; } + + /// + /// Gets the command (or status), if applicable. + /// + public OpenNettyCommand? Command { get; private set; } + + /// + /// Gets the dimension, if applicable. + /// + public OpenNettyDimension? Dimension { get; private set; } + + /// + /// Gets the raw representation of this message. + /// + public OpenNettyFrame Frame { get; private set; } + + /// + /// Gets the transmission media used to receive or send the message, if applicable. + /// + public OpenNettyMedia? Media { get; private set; } + + /// + /// Gets the transmission mode used to receive or send the message, if applicable. + /// + public OpenNettyMode? Mode { get; private set; } + + /// + /// Gets the protocol. + /// + public OpenNettyProtocol Protocol { get; private set; } + + /// + /// Gets the type of the message. + /// + public OpenNettyMessageType Type { get; private set; } + + /// + /// Gets the values associated with the message, if applicable. + /// + public ImmutableArray Values { get; private set; } = []; + + /// + /// Creates a new instance of the class. + /// + private OpenNettyMessage() + { + } + + /// + /// Creates a new instance of using the specified raw frame. 
+ /// + /// The OpenNetty protocol. + /// The raw OpenWebNet frame. + public static OpenNettyMessage CreateFromFrame(OpenNettyProtocol protocol, string frame) + { + ArgumentException.ThrowIfNullOrEmpty(frame); + + return CreateFromFrame(protocol, OpenNettyFrame.Parse(frame)); + } + + /// + /// Creates a new instance of using the specified raw frame. + /// + /// The OpenNetty protocol. + /// The raw OpenWebNet frame. + public static OpenNettyMessage CreateFromFrame(OpenNettyProtocol protocol, OpenNettyFrame frame) + { + if (!Enum.IsDefined(protocol)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0064)); + } + + var message = new OpenNettyMessage + { + Protocol = protocol, + Frame = frame, + Type = frame.Fields switch + { + // Acknowledgement messages MUST have exactly 2 fields: + // + // - the first one MUST contain exactly 2 empty parameters. + // - the second one MUST contain a valid acknowledgement type (0, 1 or 6). + // + // Note: "6" is only valid for MyHome Play (Zigbee-based) interfaces. + [ + { Parameters: [{ IsEmpty: true }, { IsEmpty: true }] }, + { Parameters: [{ Value: "0" }] } + ] => OpenNettyMessageType.NegativeAcknowledgement, + + [ + { Parameters: [{ IsEmpty: true }, { IsEmpty: true }] }, + { Parameters: [{ Value: "1" }] } + ] => OpenNettyMessageType.Acknowledgement, + + [ + { Parameters: [{ IsEmpty: true }, { IsEmpty: true }] }, + { Parameters: [{ Value: "6" }] } + ] => protocol is OpenNettyProtocol.Zigbee ? + OpenNettyMessageType.BusyNegativeAcknowledgement : + throw new InvalidOperationException(SR.GetResourceString(SR.ID0065)), + + // Status request messages MUST have exactly 2 fields: + // + // - WHO, that MUST contain exactly 1 empty and 1 non-empty parameters (i.e MUST be prefixed by a #). + // - WHERE, that MUST contain 1 or more parameter(s), possibly empty. + [ + { Parameters: [{ IsEmpty: true }, { IsEmpty: false }] }, + { Parameters: [ _ , .. ] } + ] => OpenNettyMessageType.StatusRequest, + + // Command/status messages MUST have exactly 3 fields: + // + // - WHO, that MUST contain exactly 1 non-empty parameter (i.e CANNOT be prefixed by a #). + // - WHAT, that MUST start with 1 non-empty parameter (i.e CANNOT be prefixed by a #). + // - WHERE, that MUST contain 1 or more parameter(s), possibly empty. + [ + { Parameters: [{ IsEmpty: false }] }, + { Parameters: [{ IsEmpty: false }, ..] }, + { Parameters: [ _ , ..] } + ] => OpenNettyMessageType.BusCommand, + + // Dimension request messages MUST have exactly 3 fields: + // + // - WHO, that MUST contain exactly 1 empty and 1 non-empty parameters (i.e MUST be prefixed by a #). + // - WHERE, that MUST contain 1 or more parameter(s), possibly empty. + // - DIMENSION, that MUST start with 1 non-empty parameter (i.e CANNOT be prefixed by a #). + [ + { Parameters: [{ IsEmpty: true }, { IsEmpty: false }] }, + { Parameters: [ _ , .. ] }, + { Parameters: [{ IsEmpty: false }, .. ] } + ] => OpenNettyMessageType.DimensionRequest, + + // Dimension read messages MUST have at least 4 fields: + // + // - WHO, that MUST contain exactly 1 empty and 1 non-empty parameters (i.e MUST be prefixed by a #). + // - WHERE, that MUST contain 1 or more parameter(s), possibly empty. + // - DIMENSION, that MUST start with 1 non-empty parameter (i.e CANNOT be prefixed by a #). + // - 1 or more VALUE fields. + [ + { Parameters: [{ IsEmpty: true }, { IsEmpty: false }] }, + { Parameters: [ _ , .. ] }, + { Parameters: [{ IsEmpty: false }, .. ] }, + { Parameters: [ _ , .. ] }, + .. 
+ ] => OpenNettyMessageType.DimensionRead, + + // Dimension write messages MUST have at least 4 fields: + // + // - WHO, that MUST contain exactly 1 empty and 1 non-empty parameters (i.e MUST be prefixed by a #). + // - WHERE, that MUST contain 1 or more parameter(s), possibly empty. + // - DIMENSION, that MUST start with 1 empty parameter (i.e MUST be prefixed by a #). + // - 1 or more VALUE fields. + [ + { Parameters: [{ IsEmpty: true }, { IsEmpty: false }] }, + { Parameters: [ _ , .. ] }, + { Parameters: [{ IsEmpty: true }, .. ] }, + { Parameters: [ _ , .. ] }, + .. + ] => OpenNettyMessageType.DimensionSet, + + _ => OpenNettyMessageType.Unknown + } + }; + + // Then, infer the category from the WHO field, if applicable. + if (message.Type is OpenNettyMessageType.BusCommand or + OpenNettyMessageType.StatusRequest or + OpenNettyMessageType.DimensionRequest or + OpenNettyMessageType.DimensionRead or + OpenNettyMessageType.DimensionSet) + { + var parameters = new List(capacity: frame.Fields[0].Parameters.Length - 1); + + // If the message is not a command, the WHO field is always prefixed by a #. + for ( + var index = message.Type is OpenNettyMessageType.BusCommand ? 1 : 2; + index < frame.Fields[0].Parameters.Length; + index++) + { + parameters.Add(frame.Fields[0].Parameters[index].Value); + } + + message.Category = new OpenNettyCategory(message.Type is OpenNettyMessageType.BusCommand ? + frame.Fields[0].Parameters[0].Value : + frame.Fields[0].Parameters[1].Value, [.. parameters]); + } + + // Then, infer the command/status from the WHAT field if the message is a command. + if (message.Type is OpenNettyMessageType.BusCommand) + { + // In most cases, the WHAT field doesn't include any extra parameter. + var field = frame.Fields[1]; + if (field.Parameters.Length is 1) + { + message.Command = new OpenNettyCommand(message.Category!.Value, field.Parameters[0].Value); + } + + else + { + var parameters = new List(capacity: field.Parameters.Length - 1); + + for (var index = 1; index < field.Parameters.Length; index++) + { + parameters.Add(field.Parameters[index].Value); + } + + message.Command = new OpenNettyCommand(message.Category!.Value, field.Parameters[0].Value, [.. parameters]); + } + } + + // Then, infer the address from the WHERE field, if applicable. + if (message.Type is OpenNettyMessageType.BusCommand or + OpenNettyMessageType.StatusRequest or + OpenNettyMessageType.DimensionRequest or + OpenNettyMessageType.DimensionRead or + OpenNettyMessageType.DimensionSet) + { + // For commands, the WHERE field is always the 3rd field but may be empty (e.g for management frames). + // + // In this case, the gateway itself is assumed to be the recipient of the received frame. + var field = frame.Fields[message.Type is OpenNettyMessageType.BusCommand ? 
2 : 1]; + + if (protocol is OpenNettyProtocol.Scs) + { + if (field.Parameters is not [{ IsEmpty: true }]) + { + message.Media = OpenNettyMedia.Bus; + + var type = field.Parameters[0] switch + { + { Value: "0" } when message.Category == OpenNettyCategories.Lighting || + message.Category == OpenNettyCategories.Automation + => OpenNettyAddressType.ScsLightPointGeneral, + + { Value: "00" or "1" or "2" or "3" or "4" or "5" or "6" or "7" or "8" or "9" or "10" } + when message.Category == OpenNettyCategories.Lighting || + message.Category == OpenNettyCategories.Automation + => OpenNettyAddressType.ScsLightPointArea, + + { IsEmpty: true } when message.Category == OpenNettyCategories.Lighting || + message.Category == OpenNettyCategories.Automation + => OpenNettyAddressType.ScsLightPointGroup, + + _ when message.Category == OpenNettyCategories.Lighting || + message.Category == OpenNettyCategories.Automation + => OpenNettyAddressType.ScsLightPointPointToPoint, + + _ => OpenNettyAddressType.Unknown + }; + + if (field.Parameters.Length is 1) + { + message.Address = new OpenNettyAddress(type, field.Parameters[0].Value); + } + + else + { + var parameters = new List(capacity: field.Parameters.Length - 1); + + for (var index = 1; index < field.Parameters.Length; index++) + { + parameters.Add(field.Parameters[index].Value); + } + + message.Address = new OpenNettyAddress(type, field.Parameters[0].Value, [.. parameters]); + } + } + } + + else if (protocol is OpenNettyProtocol.Zigbee) + { + // The WHERE field of Zigbee-based frames can contain up to 3 parameters: + // + // - TRANSMISSION MODE: not set for unicast, empty for multicast, 0 for broadcast. + // - ADDRESS: the address of the Zigbee device and/or the targeted unit. + // - FAMILY TYPE: the family type of the target device (always 9 for Zigbee devices). + + (message.Mode, message.Address, message.Media) = field.Parameters switch + { + [{ IsEmpty: true }] => (null as OpenNettyMode?, null as OpenNettyAddress?, null as OpenNettyMedia?), + + [{ Value: "0" }, { Value: var address }, { Value: "9" }] => (OpenNettyMode.Broadcast, CreateAddress(address), OpenNettyMedia.Radio), + [{ IsEmpty: true }, { Value: var address }, { Value: "9" }] => (OpenNettyMode.Multicast, CreateAddress(address), OpenNettyMedia.Radio), + [{ Value: var address }, { Value: "9" }] => (OpenNettyMode.Unicast, CreateAddress(address), OpenNettyMedia.Radio), + + _ => throw new InvalidOperationException(SR.GetResourceString(SR.ID0066)) + }; + + static OpenNettyAddress? CreateAddress(string? address) => address switch + { + null or { Length: 0 } => null, + + { Length: 2 } value when value is "00" => new OpenNettyAddress(OpenNettyAddressType.ZigbeeAllDevicesAllUnits, value), + { Length: 2 } value => new OpenNettyAddress(OpenNettyAddressType.ZigbeeAllDevicesSpecificUnit, value), + { Length: > 2 } value when value[^2..] is "00" => new OpenNettyAddress(OpenNettyAddressType.ZigbeeSpecificDeviceAllUnits, value), + { Length: > 2 } value => new OpenNettyAddress(OpenNettyAddressType.ZigbeeSpecificDeviceSpecificUnit, value), + + _ => throw new InvalidOperationException(SR.GetResourceString(SR.ID0066)) + }; + } + + else if (protocol is OpenNettyProtocol.Nitoo) + { + // The WHERE field of Nitoo-based frames can contain up to 3 parameters: + // + // - TRANSMISSION MODE: not set for unicast, empty for multicast, 0 for broadcast. + // - ADDRESS: the address of the Nitoo device and the targeted unit. + // - FAMILY TYPE: the family type of the target device. 
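To make the Zigbee WHERE rules above concrete, here is how a unicast lighting frame would be expected to decode. The frame string and device identifier are illustrative assumptions; only the classification logic comes from the code above.

// Sketch only: "4089907803" = device 40899078 + unit 03, "#9" = Zigbee family type.
var message = OpenNettyMessage.CreateFromFrame(OpenNettyProtocol.Zigbee, "*1*1*4089907803#9##");

// Expected, under the parsing rules above:
//   message.Type          == OpenNettyMessageType.BusCommand
//   message.Mode          == OpenNettyMode.Unicast
//   message.Media         == OpenNettyMedia.Radio
//   message.Address?.Type == OpenNettyAddressType.ZigbeeSpecificDeviceSpecificUnit
// A WHERE of "00#9" would instead map to ZigbeeAllDevicesAllUnits, and a WHERE whose
// address part ends with "00" to ZigbeeSpecificDeviceAllUnits.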
+ // + // Note: powerline is always the default value when no family type is explicitly set. + + (message.Mode, message.Address, message.Media) = field.Parameters switch + { + [{ IsEmpty: true }] => (null as OpenNettyMode?, null as OpenNettyAddress?, null as OpenNettyMedia?), + + [{ Value: "0" }, { Value: var address }] => (OpenNettyMode.Broadcast, CreateAddress(address), OpenNettyMedia.Powerline), + [{ Value: "0" }, { Value: var address }, { Value: "0" }] => (OpenNettyMode.Broadcast, CreateAddress(address), OpenNettyMedia.Powerline), + [{ Value: "0" }, { Value: var address }, { Value: "1" }] => (OpenNettyMode.Broadcast, CreateAddress(address), OpenNettyMedia.Radio), + [{ Value: "0" }, { Value: var address }, { Value: "2" }] => (OpenNettyMode.Broadcast, CreateAddress(address), OpenNettyMedia.Infrared), + + [{ IsEmpty: true }, { Value: var address }] => (OpenNettyMode.Multicast, CreateAddress(address), OpenNettyMedia.Powerline), + [{ IsEmpty: true }, { Value: var address }, { Value: "0" }] => (OpenNettyMode.Multicast, CreateAddress(address), OpenNettyMedia.Powerline), + [{ IsEmpty: true }, { Value: var address }, { Value: "1" }] => (OpenNettyMode.Multicast, CreateAddress(address), OpenNettyMedia.Radio), + [{ IsEmpty: true }, { Value: var address }, { Value: "2" }] => (OpenNettyMode.Multicast, CreateAddress(address), OpenNettyMedia.Infrared), + + [{ Value: var address }] => (OpenNettyMode.Unicast, CreateAddress(address), OpenNettyMedia.Powerline), + [{ Value: var address }, { Value: "0" }] => (OpenNettyMode.Unicast, CreateAddress(address), OpenNettyMedia.Powerline), + [{ Value: var address }, { Value: "1" }] => (OpenNettyMode.Unicast, CreateAddress(address), OpenNettyMedia.Radio), + [{ Value: var address }, { Value: "2" }] => (OpenNettyMode.Unicast, CreateAddress(address), OpenNettyMedia.Infrared), + + _ => throw new InvalidOperationException(SR.GetResourceString(SR.ID0066)) + }; + + static OpenNettyAddress? CreateAddress(string? address) => address switch + { + null or { Length: 0 } => null, + + string value when uint.TryParse(value, CultureInfo.InvariantCulture, out uint result) && result % 16 is 0 + => new OpenNettyAddress(OpenNettyAddressType.NitooDevice, value), + + string value when uint.TryParse(value, CultureInfo.InvariantCulture, out uint result) && result % 16 is not 0 + => new OpenNettyAddress(OpenNettyAddressType.NitooUnit, value), + + _ => throw new InvalidOperationException(SR.GetResourceString(SR.ID0066)) + }; + } + + else + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0064)); + } + } + + // Then, infer the dimension from the DIMENSION field if the message is either + // a dimension request, a dimension read or a dimension set message. + if (message.Type is OpenNettyMessageType.DimensionRead or OpenNettyMessageType.DimensionRequest) + { + // In the typical case, the DIMENSION field doesn't include any extra parameter. + var field = frame.Fields[2]; + if (field.Parameters.Length is 1) + { + message.Dimension = new OpenNettyDimension(message.Category!.Value, field.Parameters[0].Value); + } + + else + { + var parameters = new List(capacity: field.Parameters.Length - 1); + + for (var index = 1; index < field.Parameters.Length; index++) + { + parameters.Add(field.Parameters[index].Value); + } + + message.Dimension = new OpenNettyDimension(message.Category!.Value, field.Parameters[0].Value, [.. 
parameters]); + } + } + + else if (message.Type is OpenNettyMessageType.DimensionSet) + { + // In the typical case, the DIMENSION field doesn't include any extra parameter. + var field = frame.Fields[2]; + if (field.Parameters.Length is 2) + { + // When the message is a dimension set, the DIMENSION field is always prefixed by a #. + message.Dimension = new OpenNettyDimension(message.Category!.Value, field.Parameters[1].Value); + } + + else + { + var parameters = new List(capacity: field.Parameters.Length - 1); + + for (var index = 1; index < field.Parameters.Length; index++) + { + parameters.Add(field.Parameters[index].Value); + } + + // When the message is a dimension set, the DIMENSION field is always prefixed by a #. + message.Dimension = new OpenNettyDimension(message.Category!.Value, field.Parameters[1].Value, [.. parameters]); + } + } + + // Finally, if the message is a dimension read or dimension set message, extract the values. + if (message.Type is OpenNettyMessageType.DimensionRead or OpenNettyMessageType.DimensionSet) + { + var values = new List(capacity: frame.Fields.Length - 3); + + for (var index = 3; index < frame.Fields.Length; index++) + { + values.Add(frame.Fields[index].Parameters[0].Value); + } + + message.Values = [.. values]; + } + + return message; + } + + /// + /// Creates a new BUS COMMAND message using the specified parameters. + /// + /// The protocol. + /// The command. + /// The address. + /// The media. + /// The transmission mode. + /// A new BUS COMMAND message reflecting the specified parameters. + public static OpenNettyMessage CreateCommand( + OpenNettyProtocol protocol, OpenNettyCommand command, + OpenNettyAddress? address = null, OpenNettyMedia? media = null, OpenNettyMode? mode = null) + { + if (!Enum.IsDefined(protocol)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0064)); + } + + return CreateFromFrame(protocol, new OpenNettyFrame( + /* WHO: */ CreateWhoField(OpenNettyMessageType.BusCommand, command.Category), + /* WHAT */ new OpenNettyField(command.ToParameters()), + /* WHERE: */ CreateWhereField(protocol, command.Category, address, media, mode))); + } + + /// + /// Creates a new STATUS REQUEST message using the specified parameters. + /// + /// The protocol. + /// The category. + /// The address. + /// The media. + /// The transmission mode. + /// A new STATUS REQUEST message reflecting the specified parameters. + public static OpenNettyMessage CreateStatusRequest( + OpenNettyProtocol protocol, OpenNettyCategory category, OpenNettyAddress? address = null, + OpenNettyMedia? media = null, OpenNettyMode? mode = null) + { + if (!Enum.IsDefined(protocol)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0064)); + } + + return CreateFromFrame(protocol, new OpenNettyFrame( + /* #WHO: */ CreateWhoField(OpenNettyMessageType.StatusRequest, category), + /* WHERE: */ CreateWhereField(protocol, category, address, media, mode))); + } + + /// + /// Creates a new DIMENSION REQUEST message using the specified parameters. + /// + /// The protocol. + /// The dimension. + /// The address. + /// The media. + /// The transmission mode. + /// A new DIMENSION REQUEST message reflecting the specified parameters. + public static OpenNettyMessage CreateDimensionRequest( + OpenNettyProtocol protocol, OpenNettyDimension dimension, OpenNettyAddress? address = null, + OpenNettyMedia? media = null, OpenNettyMode? 
mode = null) + { + if (!Enum.IsDefined(protocol)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0064)); + } + + return CreateFromFrame(protocol, new OpenNettyFrame( + /* #WHO: */ CreateWhoField(OpenNettyMessageType.DimensionRequest, dimension.Category), + /* WHERE: */ CreateWhereField(protocol, dimension.Category, address, media, mode), + /* DIMENSION: */ CreateDimensionField(OpenNettyMessageType.DimensionRequest, dimension))); + } + + /// + /// Creates a new DIMENSION READ message using the specified parameters. + /// + /// The protocol. + /// The dimension. + /// The dimension values. + /// The address. + /// The media. + /// The transmission mode. + /// A new DIMENSION READ message reflecting the specified parameters. + public static OpenNettyMessage CreateDimensionRead( + OpenNettyProtocol protocol, OpenNettyDimension dimension, + ImmutableArray values, OpenNettyAddress? address = null, + OpenNettyMedia? media = null, OpenNettyMode? mode = null) + { + if (!Enum.IsDefined(protocol)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0064)); + } + + if (values.Length is 0) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0067)); + } + + var fields = new List(capacity: 3 + values.Length) + { + /* #WHO: */ CreateWhoField(OpenNettyMessageType.DimensionRead, dimension.Category), + /* WHERE: */ CreateWhereField(protocol, dimension.Category, address, media, mode), + /* DIMENSION: */ CreateDimensionField(OpenNettyMessageType.DimensionRead, dimension) + }; + + for (var index = 0; index < values.Length; index++) + { + fields.Add(new OpenNettyField(new OpenNettyParameter(values[index]))); + } + + return CreateFromFrame(protocol, new OpenNettyFrame(fields.ToImmutableArray())); + } + + /// + /// Creates a new DIMENSION SET message using the specified parameters. + /// + /// The protocol. + /// The dimension. + /// The dimension values. + /// The address. + /// The media. + /// The transmission mode. + /// A new DIMENSION SET message reflecting the specified parameters. + public static OpenNettyMessage CreateDimensionSet( + OpenNettyProtocol protocol, OpenNettyDimension dimension, + ImmutableArray values, OpenNettyAddress? address = null, + OpenNettyMedia? media = null, OpenNettyMode? mode = null) + { + if (!Enum.IsDefined(protocol)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0064)); + } + + if (values.IsDefaultOrEmpty) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0068)); + } + + var fields = new List(capacity: 3 + values.Length) + { + /* #WHO: */ CreateWhoField(OpenNettyMessageType.DimensionSet, dimension.Category), + /* WHERE: */ CreateWhereField(protocol, dimension.Category, address, media, mode), + /* #DIMENSION: */ CreateDimensionField(OpenNettyMessageType.DimensionSet, dimension) + }; + + for (var index = 0; index < values.Length; index++) + { + fields.Add(new OpenNettyField(new OpenNettyParameter(values[index]))); + } + + return CreateFromFrame(protocol, new OpenNettyFrame(fields.ToImmutableArray())); + } + + /// + public bool Equals(OpenNettyMessage? other) + { + if (ReferenceEquals(this, other)) + { + return true; + } + + // Note: comparing the protocol and the raw frame is enough to determine whether two messages are equal. + return other is not null && Protocol == other.Protocol && Frame == other.Frame; + } + + /// + public override bool Equals(object? 
obj) => obj is OpenNettyMessage message && Equals(message); + + /// + public override int GetHashCode() => HashCode.Combine(Protocol, Frame); + + /// + /// Computes the representation of the current message. + /// + /// The representation of the current message. + public override string ToString() => Frame.ToString(); + + private static OpenNettyField CreateWhoField(OpenNettyMessageType type, OpenNettyCategory category) + { + List parameters = []; + + switch (type) + { + case OpenNettyMessageType.StatusRequest: + case OpenNettyMessageType.DimensionRequest: + case OpenNettyMessageType.DimensionRead: + case OpenNettyMessageType.DimensionSet: + parameters.Add(OpenNettyParameter.Empty); + break; + } + + parameters.AddRange(category.ToParameters()); + + return new OpenNettyField(parameters); + } + + private static OpenNettyField CreateWhereField( + OpenNettyProtocol protocol, OpenNettyCategory category, + OpenNettyAddress? address, OpenNettyMedia? media, OpenNettyMode? mode) + { + if (address is null) + { + return OpenNettyField.Empty; + } + + if (protocol is OpenNettyProtocol.Scs) + { + return new OpenNettyField(address.Value.ToParameters()); + } + + else if (protocol is OpenNettyProtocol.Nitoo) + { + List parameters = []; + + switch (mode) + { + case OpenNettyMode.Multicast: + parameters.Add(OpenNettyParameter.Empty); + break; + + // Note: broadcast is always the default transmission mode for WHO=25 messages (scenarios). + case OpenNettyMode.Broadcast: + case null when category == OpenNettyCategories.Scenarios: + parameters.Add(new OpenNettyParameter("0")); + break; + } + + parameters.AddRange(address.Value.ToParameters()); + + // Note: when using the default powerline transmission media, adding an explicit parameter is not required. + if (media is not null and not OpenNettyMedia.Powerline) + { + parameters.Add(new OpenNettyParameter(media switch + { + OpenNettyMedia.Radio => "1", + OpenNettyMedia.Infrared => "2", + + _ => throw new InvalidOperationException(SR.GetResourceString(SR.ID0069)) + })); + } + + return new OpenNettyField(parameters); + } + + else if (protocol is OpenNettyProtocol.Zigbee) + { + List parameters = []; + + switch (mode) + { + case OpenNettyMode.Multicast: + parameters.Add(OpenNettyParameter.Empty); + break; + + // Note: broadcast is always the default transmission mode for messages + // sent to an address that doesn't include a device identifier part. + case OpenNettyMode.Broadcast: + case null when address.Value.Type is OpenNettyAddressType.ZigbeeAllDevicesAllUnits or + OpenNettyAddressType.ZigbeeAllDevicesSpecificUnit: + parameters.Add(new OpenNettyParameter("0")); + break; + } + + parameters.AddRange(address.Value.ToParameters()); + + parameters.Add(new OpenNettyParameter(media switch + { + OpenNettyMedia.Radio or null => "9", + + _ => throw new InvalidOperationException(SR.GetResourceString(SR.ID0069)) + })); + + return new OpenNettyField(parameters); + } + + throw new InvalidOperationException(SR.GetResourceString(SR.ID0064)); + } + + private static OpenNettyField CreateDimensionField(OpenNettyMessageType type, OpenNettyDimension dimension) + { + Debug.Assert(type is OpenNettyMessageType.DimensionRead or + OpenNettyMessageType.DimensionRequest or + OpenNettyMessageType.DimensionSet); + + List parameters = []; + + if (type is OpenNettyMessageType.DimensionSet) + { + parameters.Add(OpenNettyParameter.Empty); + } + + parameters.AddRange(dimension.ToParameters()); + + return new OpenNettyField(parameters); + } + + /// + /// Determines whether two instances are equal. 
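A short sketch showing how the factory methods and field builders above are expected to compose raw frames. The WHAT value "1" is assumed to mean "on" for the lighting category, and the WHERE value "12" is illustrative.

// Sketch only: build an SCS "light on" command and the matching status request.
var command = OpenNettyMessage.CreateCommand(
    protocol: OpenNettyProtocol.Scs,
    command:  new OpenNettyCommand(OpenNettyCategories.Lighting, "1"),
    address:  new OpenNettyAddress(OpenNettyAddressType.ScsLightPointPointToPoint, "12"));

// Expected textual form, following the WHO/WHAT/WHERE layout documented above: *1*1*12##
Console.WriteLine(command);

var request = OpenNettyMessage.CreateStatusRequest(
    OpenNettyProtocol.Scs, OpenNettyCategories.Lighting,
    new OpenNettyAddress(OpenNettyAddressType.ScsLightPointPointToPoint, "12"));

// Expected textual form (WHO prefixed by '#' for status requests): *#1*12##
Console.WriteLine(request);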
+ /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettyMessage? left, OpenNettyMessage? right) + => ReferenceEquals(left, right) || (left is not null && right is not null && left.Equals(right)); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. + public static bool operator !=(OpenNettyMessage? left, OpenNettyMessage? right) => !(left == right); +} diff --git a/src/OpenNetty/OpenNettyMessageType.cs b/src/OpenNetty/OpenNettyMessageType.cs new file mode 100644 index 0000000..c95eea8 --- /dev/null +++ b/src/OpenNetty/OpenNettyMessageType.cs @@ -0,0 +1,52 @@ +namespace OpenNetty; + +/// +/// Exposes common OpenNetty message types, as defined by the Nitoo and MyHome specifications. +/// +public enum OpenNettyMessageType +{ + /// + /// Unknown frame. + /// + Unknown = 0, + + /// + /// ACK frame. + /// + Acknowledgement = 1, + + /// + /// Bus command. + /// + BusCommand = 2, + + /// + /// BUSY NACK frame (Zigbee-specific). + /// + BusyNegativeAcknowledgement = 3, + + /// + /// Dimension read. + /// + DimensionRead = 4, + + /// + /// Dimension request. + /// + DimensionRequest = 5, + + /// + /// Dimension set. + /// + DimensionSet = 6, + + /// + /// NACK frame. + /// + NegativeAcknowledgement = 7, + + /// + /// Status request. + /// + StatusRequest = 8 +} diff --git a/src/OpenNetty/OpenNettyMode.cs b/src/OpenNetty/OpenNettyMode.cs new file mode 100644 index 0000000..183ea37 --- /dev/null +++ b/src/OpenNetty/OpenNettyMode.cs @@ -0,0 +1,22 @@ +namespace OpenNetty; + +/// +/// Exposes common OpenNetty transmission modes, as defined by the Nitoo and MyHome specifications. +/// +public enum OpenNettyMode +{ + /// + /// Broadcast (one-to-all communication). + /// + Broadcast = 0, + + /// + /// Multicast (one-to-many communication). + /// + Multicast = 1, + + /// + /// Unicast (one-to-one communication). + /// + Unicast = 2 +} diff --git a/src/OpenNetty/OpenNettyModels.cs b/src/OpenNetty/OpenNettyModels.cs new file mode 100644 index 0000000..df58864 --- /dev/null +++ b/src/OpenNetty/OpenNettyModels.cs @@ -0,0 +1,458 @@ +using System.Collections.Immutable; +using System.Globalization; + +namespace OpenNetty; + +/// +/// Exposes common OpenNetty models, as defined by the Nitoo and MyHome specifications. +/// +public static class OpenNettyModels +{ + /// + /// Lighting models (WHO = 1). + /// + public static class Lighting + { + /// + /// Switch state. + /// + public enum SwitchState + { + /// + /// Off. + /// + Off = 0, + + /// + /// On. + /// + On = 1 + } + } + + /// + /// Temperature control models (WHO = 4). + /// + public static class TemperatureControl + { + /// + /// Pilot wire mode. + /// + public enum PilotWireMode + { + /// + /// Comfort. + /// + Comfort = 0, + + /// + /// Comfort - 1°C. + /// + ComfortMinusOne = 1, + + /// + /// Comfort - 2°C. + /// + ComfortMinusTwo = 2, + + /// + /// Eco (comfort - 4°C). + /// + Eco = 3, + + /// + /// Frost protection (~7°C). + /// + FrostProtection = 4 + } + + /// + /// Pilot wire configuration. + /// + public sealed record class PilotWireConfiguration + { + /// + /// Gets or sets the derogation duration. + /// + public required PilotWireDerogationDuration? DerogationDuration { get; init; } + + /// + /// Gets or sets a boolean indicating whether a derogation is active. 
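The acknowledgement patterns documented in CreateFromFrame map directly onto the message types listed above. A quick sketch, using the standard OpenWebNet acknowledgement frames:

// Sketch only: classify the three acknowledgement frames described above.
var ack  = OpenNettyMessage.CreateFromFrame(OpenNettyProtocol.Scs,    "*#*1##");
var nack = OpenNettyMessage.CreateFromFrame(OpenNettyProtocol.Scs,    "*#*0##");
var busy = OpenNettyMessage.CreateFromFrame(OpenNettyProtocol.Zigbee, "*#*6##");

// Expected:
//   ack.Type  == OpenNettyMessageType.Acknowledgement
//   nack.Type == OpenNettyMessageType.NegativeAcknowledgement
//   busy.Type == OpenNettyMessageType.BusyNegativeAcknowledgement
// "*#*6##" is only accepted for the Zigbee protocol and is rejected otherwise.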
+ /// + public required bool IsDerogationActive { get; init; } + + /// + /// Gets or sets the pilot wire mode. + /// + public required PilotWireMode Mode { get; init; } + + /// + /// Creates a new instance of the class using the specified unit description. + /// + /// The unit description values. + /// A new instance of the class. + public static PilotWireConfiguration CreateFromUnitDescription(ImmutableArray values) => new() + { + DerogationDuration = ushort.Parse(values[0], CultureInfo.InvariantCulture) switch + { + >= 8 and < 72 => PilotWireDerogationDuration.None, + >= 72 and < 136 => PilotWireDerogationDuration.FourHours, + >= 136 => PilotWireDerogationDuration.EightHours, + + _ => null + }, + IsDerogationActive = ushort.Parse(values[0], CultureInfo.InvariantCulture) is >= 8, + Mode = values[0] switch + { + "0" or "8" or "72" or "136" => PilotWireMode.Comfort, + "1" or "9" or "73" or "137" => PilotWireMode.ComfortMinusOne, + "2" or "10" or "74" or "138" => PilotWireMode.ComfortMinusTwo, + "3" or "11" or "75" or "139" => PilotWireMode.Eco, + "4" or "12" or "76" or "140" => PilotWireMode.FrostProtection, + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + } + }; + } + + /// + /// Pilot wire derogation duration. + /// + public enum PilotWireDerogationDuration + { + /// + /// None. + /// + None = 0, + + /// + /// 4 hours. + /// + FourHours = 1, + + /// + /// 8 hours. + /// + EightHours = 2 + } + + /// + /// Smart meter indexes. + /// + public sealed record class SmartMeterIndexes + { + /// + /// Gets or sets the base index. + /// + public required ulong BaseIndex { get; init; } + + /// + /// Gets or sets the blue index, if available. + /// + public required ulong? BlueIndex { get; init; } + + /// + /// Gets or sets the off-peak index, if available. + /// + public required ulong? OffPeakIndex { get; init; } + + /// + /// Gets or sets the red index, if available. + /// + public required ulong? RedIndex { get; init; } + + /// + /// Gets or sets the subscription type. + /// + public required SmartMeterSubscriptionType SubscriptionType { get; init; } + + /// + /// Gets or sets the blue index, if available. + /// + public required ulong? WhiteIndex { get; init; } + + /// + /// Creates a new instance of the class using the specified unit description. + /// + /// The unit description values. + /// A new instance of the class. + public static SmartMeterIndexes CreateFromDimensionValues(ImmutableArray values) => new() + { + BaseIndex = ulong.Parse(values[1], CultureInfo.InvariantCulture), + BlueIndex = values[0] is "3" ? ulong.Parse(values[2], CultureInfo.InvariantCulture) : null, + OffPeakIndex = values[0] is "2" ? ulong.Parse(values[2], CultureInfo.InvariantCulture) : null, + RedIndex = values[0] is "5" ? ulong.Parse(values[2], CultureInfo.InvariantCulture) : null, + SubscriptionType = values[0] switch + { + "1" => SmartMeterSubscriptionType.Base, + "2" => SmartMeterSubscriptionType.OffPeak, + "3" or "4" or "5" => SmartMeterSubscriptionType.Tempo, + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + }, + WhiteIndex = values[0] is "4" ? ulong.Parse(values[2], CultureInfo.InvariantCulture) : null, + }; + } + + /// + /// Smart meter information. + /// + public sealed record class SmartMeterInformation + { + /// + /// Gets or sets a boolean indicating whether a power cut is active. + /// + public required bool IsPowerCutActive { get; init; } + + /// + /// Gets or sets the rate type. 
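Since the pilot wire ranges above are easy to misread, here is a worked decoding; the raw value "75" is an illustrative assumption.

// Sketch only: decode a pilot wire unit description value with the ranges above.
var configuration = OpenNettyModels.TemperatureControl.PilotWireConfiguration
    .CreateFromUnitDescription(["75"]);

// 75 falls in the [72, 136) range (4-hour derogation), is >= 8 (derogation active)
// and belongs to the "3/11/75/139" group, i.e. the Eco mode:
//   configuration.Mode               == PilotWireMode.Eco
//   configuration.IsDerogationActive == true
//   configuration.DerogationDuration == PilotWireDerogationDuration.FourHours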
+ /// + public required SmartMeterRateType RateType { get; init; } + + /// + /// Creates a new instance of the class using the specified unit description. + /// + /// The unit description values. + /// A new instance of the class. + public static SmartMeterInformation CreateFromUnitDescription(ImmutableArray values) => new() + { + IsPowerCutActive = values[0] is "33" or "49", + RateType = values[0] switch + { + "32" or "33" => SmartMeterRateType.OffPeak, + "48" or "49" => SmartMeterRateType.Peak, + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + } + }; + } + + /// + /// Smart meter subscription type. + /// + public enum SmartMeterSubscriptionType + { + /// + /// Base. + /// + Base = 0, + + /// + /// Off-peak. + /// + OffPeak = 1, + + /// + /// Tempo. + /// + Tempo = 2 + } + + /// + /// Smart meter rate type. + /// + public enum SmartMeterRateType + { + /// + /// Peak. + /// + Peak = 0, + + /// + /// Off-peak. + /// + OffPeak = 1 + } + + /// + /// Water heater mode. + /// + public enum WaterHeaterMode + { + /// + /// Automatic. + /// + Automatic = 0, + + /// + /// Forced off (no hot water will be produced). + /// + ForcedOff = 1, + + /// + /// Forced on (hot water will be produced until the next off-peak signal, if available). + /// + ForcedOn = 2 + } + + /// + /// Water heater state. + /// + public enum WaterHeaterState + { + /// + /// Idle. + /// + Idle = 0, + + /// + /// Heating. + /// + Heating = 1 + } + } + + /// + /// Alarm models (WHO = 5). + /// + public static class Alarm + { + /// + /// Wireless burglar alarm state. + /// + public enum WirelessBurglarAlarmState + { + /// + /// Disarmed. + /// + Disarmed = 0, + + /// + /// Armed. + /// + Armed = 1, + + /// + /// Partially armed. + /// + PartiallyArmed = 2, + + /// + /// Exit delay elapsed. + /// + ExitDelayElapsed = 3, + + /// + /// Alarm triggered. + /// + Triggered = 4, + + /// + /// Event detected. + /// + EventDetected = 5 + } + } + + /// + /// Diagnostics models (WHO = 1000). + /// + public static class Diagnostics + { + /// + /// Device description. + /// + public sealed record class DeviceDescription + { + /// + /// Gets or sets the function code. + /// + public required ushort FunctionCode { get; init; } + + /// + /// Gets or sets the device model. + /// + public required string Model { get; init; } + + /// + /// Gets or sets the number of units available. + /// + public required ushort Units { get; init; } + + /// + /// Gets or sets the device version. + /// + public required Version Version { get; init; } + + /// + /// Creates a new instance of the class using the specified unit description. + /// + /// The unit description values. + /// A new instance of the class. + public static DeviceDescription CreateFromDeviceDescription(ImmutableArray values) => new() + { + FunctionCode = ushort.Parse(values[2], CultureInfo.InvariantCulture), + Model = uint.Parse(values[0], CultureInfo.InvariantCulture).ToString("X"), + Units = ushort.Parse(values[3], CultureInfo.InvariantCulture), + Version = new Version(int.Parse(uint.Parse(values[1], CultureInfo.InvariantCulture).ToString("X")), 0) + }; + } + + /// + /// Memory data. + /// + public sealed record class MemoryData + { + /// + /// Gets or sets the address. + /// + public required OpenNettyAddress Address { get; init; } + + /// + /// Gets or sets the function code. + /// + public required ushort FunctionCode { get; init; } + + /// + /// Gets or sets the media. 
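A worked example of the device description conversions above; the raw values are invented for illustration and do not correspond to a real device.

// Sketch only: decode a Nitoo device description.
var description = OpenNettyModels.Diagnostics.DeviceDescription
    .CreateFromDeviceDescription(["1063", "18", "1", "2"]);

// Following the conversions above:
//   description.Model        == "427"  (1063 rendered as hexadecimal)
//   description.Version      == 12.0   ("18" -> 0x12 -> "12", parsed back as an integer)
//   description.FunctionCode == 1
//   description.Units        == 2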
+ /// + public required OpenNettyMedia Media { get; init; } + + /// + /// Creates a new instance of the class using the specified unit description. + /// + /// The unit description values. + /// A new instance of the class. + public static MemoryData CreateFromUnitDescription(ImmutableArray values) => new() + { + Address = new OpenNettyAddress(OpenNettyAddressType.NitooDevice, values[1]), + FunctionCode = ushort.Parse(values[2], CultureInfo.InvariantCulture), + Media = values[0] switch + { + "64" => OpenNettyMedia.Radio, + "96" => OpenNettyMedia.Powerline, + "128" => OpenNettyMedia.Infrared, + + _ => throw new InvalidDataException(SR.GetResourceString(SR.ID0075)) + } + }; + } + + /// + /// Unit description. + /// + public sealed record class UnitDescription + { + /// + /// Gets or sets the function code. + /// + public required ushort FunctionCode { get; init; } + + /// + /// Gets or sets the values. + /// + public ImmutableArray Values { get; init; } + + /// + /// Creates a new instance of the class using the specified unit description. + /// + /// The unit description values. + /// A new instance of the class. + public static UnitDescription CreateFromUnitDescription(ImmutableArray values) => new() + { + FunctionCode = ushort.Parse(values[0], CultureInfo.InvariantCulture), + Values = values[1..] + }; + } + } +} diff --git a/src/OpenNetty/OpenNettyNotification.cs b/src/OpenNetty/OpenNettyNotification.cs new file mode 100644 index 0000000..7440dee --- /dev/null +++ b/src/OpenNetty/OpenNettyNotification.cs @@ -0,0 +1,12 @@ +namespace OpenNetty; + +/// +/// Represents an abstract OpenNetty notification. +/// +public abstract class OpenNettyNotification +{ + /// + /// Gets or sets the OpenNetty gateway associated with the notification. + /// + public required OpenNettyGateway Gateway { get; init; } +} diff --git a/src/OpenNetty/OpenNettyNotifications.cs b/src/OpenNetty/OpenNettyNotifications.cs new file mode 100644 index 0000000..968ac59 --- /dev/null +++ b/src/OpenNetty/OpenNettyNotifications.cs @@ -0,0 +1,170 @@ +namespace OpenNetty; + +/// +/// Exposes common notifications supported by the OpenNetty stack. +/// +public static class OpenNettyNotifications +{ + /// + /// Represents a notification dispatched when an outgoing message is ready to be sent. + /// + public sealed class MessageReady : OpenNettyNotification + { + /// + /// Gets or sets the message to send. + /// + public required OpenNettyMessage Message { get; init; } + + /// + /// Gets or sets the transmission options to use. + /// + public required OpenNettyTransmissionOptions Options { get; init; } + + /// + /// Gets or sets the transaction associated with the notification. + /// + public required OpenNettyTransaction Transaction { get; init; } + } + + /// + /// Represents a notification dispatched when an outgoing message was successfully sent. + /// + public sealed class MessageSent : OpenNettyNotification + { + /// + /// Gets or sets the message that was successfully sent. + /// + public required OpenNettyMessage Message { get; init; } + + /// + /// Gets or sets the session that was used to send the message. + /// + public required OpenNettySession Session { get; init; } + + /// + /// Gets or sets the transaction associated with the notification. + /// + public required OpenNettyTransaction Transaction { get; init; } + } + + /// + /// Represents a notification dispatched when an incoming message was received. + /// + public sealed class MessageReceived : OpenNettyNotification + { + /// + /// Gets or sets the received message. 
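A companion sketch for the memory data mapping above; the raw values are again invented for illustration.

// Sketch only: decode a memory data record.
var data = OpenNettyModels.Diagnostics.MemoryData
    .CreateFromUnitDescription(["96", "1234560", "4"]);

// "96" selects the powerline media, "1234560" is surfaced as a Nitoo device address
// and "4" as the function code:
//   data.Media        == OpenNettyMedia.Powerline
//   data.Address.Type == OpenNettyAddressType.NitooDevice
//   data.FunctionCode == 4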
+ /// + public required OpenNettyMessage Message { get; init; } + + /// + /// Gets or sets the session that received the message. + /// + public required OpenNettySession Session { get; init; } + } + + /// + /// Represents a notification dispatched when an outgoing message was rejected by a Nitoo device. + /// + public sealed class InvalidAction : OpenNettyNotification + { + /// + /// Gets or sets the message that was rejected by the device. + /// + public required OpenNettyMessage Message { get; init; } + + /// + /// Gets or sets the session that was used to send the message. + /// + public required OpenNettySession Session { get; init; } + + /// + /// Gets or sets the transaction associated with the notification. + /// + public required OpenNettyTransaction Transaction { get; init; } + } + + /// + /// Represents a notification dispatched when an outgoing message was rejected by the gateway. + /// + public sealed class InvalidFrame : OpenNettyNotification + { + /// + /// Gets or sets the message that was rejected by the gateway. + /// + public required OpenNettyMessage Message { get; init; } + + /// + /// Gets or sets the session that was used to send the message. + /// + public required OpenNettySession Session { get; init; } + + /// + /// Gets or sets the transaction associated with the notification. + /// + public required OpenNettyTransaction Transaction { get; init; } + } + + /// + /// Represents a notification dispatched when an outgoing message was not validated by a Nitoo device. + /// + public sealed class NoActionReceived : OpenNettyNotification + { + /// + /// Gets or sets the message that wasn't validated by the device. + /// + public required OpenNettyMessage Message { get; init; } + + /// + /// Gets or sets the session that was used to send the message. + /// + public required OpenNettySession Session { get; init; } + + /// + /// Gets or sets the transaction associated with the notification. + /// + public required OpenNettyTransaction Transaction { get; init; } + } + + /// + /// Represents a notification dispatched when an outgoing message was not validated by the gateway. + /// + public sealed class NoAcknowledgmentReceived : OpenNettyNotification + { + /// + /// Gets or sets the message that wasn't validated by the device. + /// + public required OpenNettyMessage Message { get; init; } + + /// + /// Gets or sets the session that was used to send the message. + /// + public required OpenNettySession Session { get; init; } + + /// + /// Gets or sets the transaction associated with the notification. + /// + public required OpenNettyTransaction Transaction { get; init; } + } + + /// + /// Represents a notification dispatched when an outgoing message was rejected by a busy gateway. + /// + public sealed class GatewayBusy : OpenNettyNotification + { + /// + /// Gets or sets the message that was rejected by the gateway. + /// + public required OpenNettyMessage Message { get; init; } + + /// + /// Gets or sets the session that was used to send the message. + /// + public required OpenNettySession Session { get; init; } + + /// + /// Gets or sets the transaction associated with the notification. + /// + public required OpenNettyTransaction Transaction { get; init; } + } +} diff --git a/src/OpenNetty/OpenNettyOptions.cs b/src/OpenNetty/OpenNettyOptions.cs new file mode 100644 index 0000000..cb59346 --- /dev/null +++ b/src/OpenNetty/OpenNettyOptions.cs @@ -0,0 +1,17 @@ +namespace OpenNetty; + +/// +/// Provides various settings needed to configure the OpenNetty services. 
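A sketch of a consumer dispatching on the notification types listed above. The handler shape is an assumption for illustration; this patch does not define such an interface.

// Sketch only: react to the concrete notification types defined above.
static ValueTask HandleAsync(OpenNettyNotification notification) => notification switch
{
    OpenNettyNotifications.MessageReceived received => LogAsync($"<- {received.Message}"),
    OpenNettyNotifications.MessageSent sent         => LogAsync($"-> {sent.Message}"),
    OpenNettyNotifications.GatewayBusy busy         => LogAsync($"Gateway busy: {busy.Message}"),
    _                                               => ValueTask.CompletedTask
};

static ValueTask LogAsync(string message)
{
    Console.WriteLine(message);
    return ValueTask.CompletedTask;
}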
+/// +public sealed class OpenNettyOptions +{ + /// + /// Gets the list of registered gateways. + /// + public List Gateways { get; } = []; + + /// + /// Gets the list of registered endpoints. + /// + public List Endpoints { get; } = []; +} diff --git a/src/OpenNetty/OpenNettyParameter.cs b/src/OpenNetty/OpenNettyParameter.cs new file mode 100644 index 0000000..6bd1fb2 --- /dev/null +++ b/src/OpenNetty/OpenNettyParameter.cs @@ -0,0 +1,93 @@ +using System.Diagnostics; +using System.Text; + +namespace OpenNetty; + +/// +/// Represents a raw OpenNetty parameter. +/// +[DebuggerDisplay("{ToString(),nq}")] +public readonly struct OpenNettyParameter : IEquatable +{ + /// + /// Creates a new instance of the structure. + /// + /// The value. + public OpenNettyParameter(string value) + { + ArgumentNullException.ThrowIfNull(value); + + // Ensure the value only includes ASCII digits. + foreach (var character in value) + { + if (!char.IsAsciiDigit(character)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0004), nameof(value)); + } + } + + Value = value; + } + + /// + /// Gets a boolean indicating whether the parameter represents an empty value. + /// + public bool IsEmpty => string.IsNullOrEmpty(Value); + + /// + /// Gets the value associated with the parameter. + /// + public string Value { get; } + + /// + /// Represents an empty parameter. + /// + public static readonly OpenNettyParameter Empty = new(string.Empty); + + /// + /// Parses an OpenNetty parameter from the specified . + /// + /// The ASCII/UTF-8 buffer containing the raw parameter. + /// The OpenNetty parameter corresponding to the specified . + public static OpenNettyParameter Parse(ReadOnlySpan buffer) + { + // Note: parameters can be omitted. In this case, they are represented as empty values. + if (buffer.IsEmpty) + { + return new(string.Empty); + } + + return new(Encoding.ASCII.GetString(buffer)); + } + + /// + public bool Equals(OpenNettyParameter other) => string.Equals(Value, other.Value, StringComparison.OrdinalIgnoreCase); + + /// + public override bool Equals(object? obj) => obj is OpenNettyParameter parameter && Equals(parameter); + + /// + public override int GetHashCode() => Value?.GetHashCode() ?? 0; + + /// + /// Computes the representation of the current parameter. + /// + /// The representation of the current parameter. + public override string ToString() => Value ?? string.Empty; + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettyParameter left, OpenNettyParameter right) => left.Equals(right); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. + public static bool operator !=(OpenNettyParameter left, OpenNettyParameter right) => !(left == right); +} diff --git a/src/OpenNetty/OpenNettyPipe.cs b/src/OpenNetty/OpenNettyPipe.cs new file mode 100644 index 0000000..daae6e7 --- /dev/null +++ b/src/OpenNetty/OpenNettyPipe.cs @@ -0,0 +1,213 @@ +using System.Buffers; +using System.Diagnostics.CodeAnalysis; +using System.IO.Pipelines; +using System.Runtime.CompilerServices; +using System.Text; +using static OpenNetty.OpenNettyConstants; + +namespace OpenNetty; + +/// +/// Represents a duplex pipe from which OpenWebNet frames can be read from and written to. 
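A short sketch of the digit-only invariant enforced by the OpenNettyParameter constructor above:

// Sketch only: valid and invalid parameter values.
var parameter = new OpenNettyParameter("21");   // Valid: contains ASCII digits only.
var empty = OpenNettyParameter.Empty;           // Stands for an omitted parameter.

Console.WriteLine(parameter.IsEmpty);           // False
Console.WriteLine(empty.IsEmpty);               // True

// new OpenNettyParameter("2#1") would throw ArgumentException, since '#' and '*'
// act as separators in OpenWebNet frames and cannot appear inside a parameter value.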
+/// +public class OpenNettyPipe : IDisposable +{ + private readonly PipeReader _reader; + private SemaphoreSlim? _readLock = new(initialCount: 1, maxCount: 1); + private readonly PipeWriter _writer; + private SemaphoreSlim? _writeLock = new(initialCount: 1, maxCount: 1); + + /// + /// Creates a new instance of the class. + /// + /// The pipe reader. + /// The pipe writer. + /// The pipe reader or writer is null. + public OpenNettyPipe(PipeReader reader, PipeWriter writer) + { + _reader = reader ?? throw new ArgumentNullException(nameof(reader)); + _writer = writer ?? throw new ArgumentNullException(nameof(writer)); + } + + /// + /// Creates a new OpenNetty pipe wrapping the specified duplex pipe. + /// + /// The duplex pipe. + /// The OpenNetty pipe. + public static OpenNettyPipe Create(IDuplexPipe pipe) + { + ArgumentNullException.ThrowIfNull(pipe); + + return new OpenNettyPipe(pipe.Input, pipe.Output); + } + + /// + /// Creates a new OpenNetty pipe wrapping the specified stream. + /// + /// Note: the stream is not closed when this instance is disposed. + /// The stream. + /// The OpenNetty pipe. + public static OpenNettyPipe Create(Stream stream) + { + ArgumentNullException.ThrowIfNull(stream); + + if (!stream.CanRead) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0038), nameof(stream)); + } + + if (!stream.CanWrite) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0039), nameof(stream)); + } + + return new OpenNettyPipe( + reader: PipeReader.Create(stream, new StreamPipeReaderOptions(leaveOpen: true)), + writer: PipeWriter.Create(stream, new StreamPipeWriterOptions(leaveOpen: true))); + } + + /// + /// Disposes the current instance. + /// + public void Dispose() + { + if (Interlocked.Exchange(ref _readLock, null) is SemaphoreSlim readLock) + { + readLock.Dispose(); + } + + if (Interlocked.Exchange(ref _writeLock, null) is SemaphoreSlim writeLock) + { + writeLock.Dispose(); + } + + GC.SuppressFinalize(this); + } + + /// + /// Reads the next frame available. + /// + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous operation and whose result + /// returns the next frame available, or if the end of the data stream has been reached. + /// + /// The current instance has been disposed. + /// A concurrent read operation is already being processed. + public async ValueTask ReadAsync(CancellationToken cancellationToken = default) + { + if (_readLock is not SemaphoreSlim semaphore) + { + throw new ObjectDisposedException(SR.GetResourceString(SR.ID0040)); + } + + if (!await semaphore.WaitAsync(TimeSpan.Zero, cancellationToken)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0041)); + } + + try + { + while (!cancellationToken.IsCancellationRequested) + { + var result = await _reader.ReadAsync(cancellationToken); + if (result.IsCanceled) + { + break; + } + + var buffer = result.Buffer; + var consumed = buffer.Start; + var examined = buffer.End; + + try + { + if (TryReadFrame(ref buffer, out OpenNettyFrame? frame)) + { + consumed = buffer.Start; + examined = consumed; + + return frame.Value; + } + + if (result.IsCompleted) + { + break; + } + } + + finally + { + _reader.AdvanceTo(consumed, examined); + } + } + + return null; + } + + finally + { + semaphore.Release(); + } + + [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)] + static bool TryReadFrame(ref ReadOnlySequence buffer, [NotNullWhen(true)] out OpenNettyFrame? 
frame) + { + var reader = new SequenceReader(buffer); + + if (!reader.TryAdvanceTo(Separators.Asterisk[0], advancePastDelimiter: false)) + { + frame = null; + return false; + } + + var start = reader.Position; + + // Note: the ReadOnlySequence returned by TryReadTo() doesn't include the end delimiter (##) + // that is part of an OpenWebNet frame and is required by OpenNettyFrame.Parse(). To work around + // this limitation, the returned sequence is ignored and a slice is done on the original sequence. + if (!reader.TryReadTo(out ReadOnlySequence _, Delimiters.End, advancePastDelimiter: true)) + { + frame = null; + return false; + } + + frame = OpenNettyFrame.Parse(buffer.Slice(start, reader.Position)); + buffer = buffer.Slice(reader.Position); + return true; + } + } + + /// + /// Writes the specified frame. + /// + /// The frame. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous operation. + /// + /// The current instance has been disposed. + /// A concurrent write operation is already being processed. + public async ValueTask WriteAsync(OpenNettyFrame frame, CancellationToken cancellationToken = default) + { + if (_writeLock is not SemaphoreSlim semaphore) + { + throw new ObjectDisposedException(SR.GetResourceString(SR.ID0040)); + } + + if (!await semaphore.WaitAsync(TimeSpan.Zero, cancellationToken)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0042)); + } + + try + { + await _writer.WriteAsync(Encoding.ASCII.GetBytes(frame.ToString()), cancellationToken); + } + + finally + { + semaphore.Release(); + } + } +} diff --git a/src/OpenNetty/OpenNettyPipeline.cs b/src/OpenNetty/OpenNettyPipeline.cs new file mode 100644 index 0000000..c84bb99 --- /dev/null +++ b/src/OpenNetty/OpenNettyPipeline.cs @@ -0,0 +1,92 @@ +using System.ComponentModel; +using System.Reactive.Concurrency; +using System.Reactive.Linq; +using System.Reactive.Subjects; +using System.Threading.Channels; +using Microsoft.Extensions.Hosting; + +namespace OpenNetty; + +/// +/// Represents a thread-safe notification pipeline that can be observed and whose delivery order is guaranteed. +/// +[EditorBrowsable(EditorBrowsableState.Advanced)] +public sealed class OpenNettyPipeline : IOpenNettyPipeline, IDisposable +{ + private readonly Channel _channel = Channel.CreateUnbounded(new UnboundedChannelOptions + { + AllowSynchronousContinuations = false, + SingleReader = true, + SingleWriter = false + }); + + private readonly IConnectableAsyncObservable _observable; + private readonly CancellationTokenRegistration _registration; + + /// + /// Creates a new instance of the class. + /// + /// The host application lifetime. + public OpenNettyPipeline(IHostApplicationLifetime lifetime) + { + _observable = AsyncObservable.Create(observer => + { + return TaskPoolAsyncScheduler.Default.ScheduleAsync(async cancellationToken => + { + while (!cancellationToken.IsCancellationRequested) + { + try + { + if (!await _channel.Reader.WaitToReadAsync(cancellationToken)) + { + await observer.OnCompletedAsync(); + return; + } + + while (_channel.Reader.TryRead(out OpenNettyNotification? 
notification)) + { + await observer.OnNextAsync(notification); + } + } + + catch (ChannelClosedException) + { + await observer.OnCompletedAsync(); + return; + } + + catch (Exception exception) + { + await observer.OnErrorAsync(exception); + } + } + }); + }) + .Retry() + .Multicast(new ConcurrentSimpleAsyncSubject()); + + // Marks the channel as completed when the host indicates the application is shutting down. + _registration = lifetime.ApplicationStopping.Register(static state => + ((OpenNettyPipeline) state!)._channel.Writer.TryComplete(), this); + } + + /// + /// Registers a new notification observer. + /// + /// + /// A that can be used to monitor the asynchronous operation and whose + /// result is used by the caller to indicate that the subscription should be aborted and discarded. + /// + public ValueTask SubscribeAsync(IAsyncObserver observer) + => _observable.ObserveOn(TaskPoolAsyncScheduler.Default).SubscribeAsync(observer); + + /// + public ValueTask PublishAsync(OpenNettyNotification notification, CancellationToken cancellationToken = default) + => _channel.Writer.WriteAsync(notification, cancellationToken); + + /// + public ValueTask ConnectAsync() => _observable.ConnectAsync(); + + /// + public void Dispose() => _registration.Dispose(); +} diff --git a/src/OpenNetty/OpenNettyProtocol.cs b/src/OpenNetty/OpenNettyProtocol.cs new file mode 100644 index 0000000..8cb9d6f --- /dev/null +++ b/src/OpenNetty/OpenNettyProtocol.cs @@ -0,0 +1,22 @@ +namespace OpenNetty; + +/// +/// Exposes common OpenNetty protocols, as defined by the Nitoo and MyHome specifications. +/// +public enum OpenNettyProtocol +{ + /// + /// SCS (used in the Legrand/BTicino MyHome and MyHome Up products). + /// + Scs = 0, + + /// + /// Nitoo (used in the In One by Legrand products). + /// + Nitoo = 1, + + /// + /// Zigbee (used in the Legrand/BTicino MyHome Play products). + /// + Zigbee = 2 +} diff --git a/src/OpenNetty/OpenNettyResources.resx b/src/OpenNetty/OpenNettyResources.resx new file mode 100644 index 0000000..d54a538 --- /dev/null +++ b/src/OpenNetty/OpenNettyResources.resx @@ -0,0 +1,453 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + text/microsoft-resx + + + 2.0 + + + System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + The frame must start with a '*' character. + + + The frame cannot contain two consecutive '#', except when used at the end of the frame. + + + The frame must end with two '#' characters. + + + The value can only include digits from 0 to 9. + + + The parameter cannot contain two consecutive '#'. + + + The specified brand is not supported. + + + The connection has been disposed. + + + The connection has already been started. + + + The session has been disposed and is no longer usable. + + + The protocol attached to the message isn't valid. + + + Action validation is only supported by Nitoo devices. + + + Action validation is only supported for BUS COMMAND and DIMENSION SET messages. + + + Action validation is only supported for unicast transmissions. + + + Action validation is only supported when specifying a destination address. + + + A message is already being sent by this session. + + + No acknowledgement frame was received in the allowed time frame. + + + The gateway was too busy to process the specified frame. 
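Tying the transport pieces together, a sketch of how the OpenNettyPipe type introduced above can be used to exchange raw frames. The stream variable stands for any readable and writable transport (for example a serial port base stream or a TCP stream) and the frame content is illustrative.

// Sketch only: read and write raw OpenWebNet frames through an OpenNettyPipe.
using var pipe = OpenNettyPipe.Create(stream);

// Frames are written as ASCII; concurrent writes on the same pipe are rejected.
await pipe.WriteAsync(OpenNettyFrame.Parse("*#1*12##"), cancellationToken);

// ReadAsync returns null once the underlying stream completes.
while (await pipe.ReadAsync(cancellationToken) is OpenNettyFrame frame)
{
    Console.WriteLine(frame);
}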
+ + + The frame was rejected by the gateway. + + + No action validation frame was received in the allowed time frame. + + + The specified session type is not valid. + + + The specified session type is not supported by the gateway. + + + The gateway didn't acknowledge the session request. + + + The authentication method returned by the OpenWebNet gateway is not supported. + + + Authentication is required for this gateway but no password was provided. + + + The gateway returned an invalid nonce. + + + The authentication data was rejected by the gateway. + + + The gateway returned an invalid authentication hash. + + + The session request was not acknowledged in the allowed time frame. + + + The specified value is not valid. + + + This operation is not supported for Nitoo devices + + + The specified protocol is not supported by the selected gateway. + + + No gateway supporting the specified protocol is available to process the request. + + + No dimension reply was received in the allowed time frame. + + + No status reply was received in the allowed time frame. + + + The operation was rejected by the remote device. + + + The operation was not acknowledged by the remote device. + + + The frame couldn't be processed by a worker in the allowed time frame. + + + The specified stream doesn't support read operations. + + + The specified stream doesn't support write operations. + + + The pipe has been disposed. + + + A read operation is already being processed. + + + A write operation is already being processed. + + + The specified address type is not supported. + + + The identifier must be between 0 and 2^24. + + + The unit must be between 0 and 15. + + + The area must be between 0 and 10. + + + The extension must be between 0 and 15. + + + The group must be between 0 and 10. + + + The light point must be between 1 and 15. + + + The unit must be between 0 and 99. + + + The specified identifier is not a valid hexadecimal identifier. + + + The address doesn't represent a Nitoo address. + + + The address doesn't represent a valid Nitoo address. + + + The address doesn't represent a SCS light point area address. + + + The address doesn't represent a valid SCS light point area address. + + + The address doesn't represent a SCS light point general address. + + + The address doesn't represent a valid SCS light point general address. + + + The address doesn't represent a SCS light point group address. + + + The address doesn't represent a valid SCS light point group address. + + + The address doesn't represent a SCS light point point-to-point address. + + + The address doesn't represent a valid SCS light point point-to-point address. + + + The address doesn't represent a Zigbee address. + + + The address doesn't represent a valid Zigbee address. + + + The specified protocol is not supported. + + + Busy negative acknowledgements are only valid for Zigbee messages. + + + The specified WHERE field is malformed. + + + At least one value must be specified for dimension read messages. + + + At least one value must be specified for dimension set messages. + + + The specified media is not supported. + + + A serial port must be attached for gateways using serial connections. + + + An Internet Protocol endpoint must be attached for gateways using TCP connections. + + + The specified connection type is not supported. + + + An error occurred while opening the internal devices database. + + + An essential resilience property couldn't be found in the context properties. + + + An invalid or unexpected value was received. 
+ + + The specified endpoint doesn't have the capability required to perform this action. + + + The specified OpenNetty configuration file was not found. + + + The specified OpenNetty configuration file is malformed. Make sure it specifies a 'Configuration' root node. + + + The channel was closed by the hosted service. + + + Gateway nodes can only be defined under a device node. + + + Gateways must include a non-null '{0}' attribute. + + + A non-null '{0}' attribute is required for gateways using serial port connections. + + + A non-null '{0}' attribute is required for gateways using TCP connections. + + + The specified connection type is not valid. + + + An endpoint with the name '{0}' has already been added. + + + Nitoo and Zigbee units must include a non-null '{0}' attribute. + + + '{0}' is not a valid address type. + + + An address type couldn't be automatically inferred for the endpoint '{0}'. Consider adding an explicit '{1}' attribute. + + + A non-null '{0}' attribute is required for Nitoo devices. + + + A non-null '{0}' attribute is required for Nitoo units. + + + A non-null '{0}' attribute is required for SCS light point area addresses. + + + A non-null '{0}' attribute is required for SCS light point group addresses. + + + A non-null '{0}' attribute is required for SCS light point point-to-point addresses. + + + A non-null '{0}' attribute is required for Zigbee devices. + + + A non-null '{0}' attribute is required for Zigbee units. + + + A non-null '{0}' attribute is required for endpoint capabilities. + + + A non-null '{0}' attribute is required for device nodes. + + + The device model {0} '{1}' is not valid or is not supported. + + + A non-null '{0}' attribute is required for endpoint, device or unit settings. + + + The unit '{0}' is not valid for the device {1} '{2}'. + + + A non-null '{0}' attribute is required for Nitoo scenarios. + + + The gateway '{0}' doesn't exist or is not valid. + + + The '{0}' node required when enabling the MQTT integration was not found in the configuration file. + + + A non-null '{0}' attribute is required for the MQTT configuration node. + + + This operation is not valid for open serial ports. + + + '{0}' is not a valid serial port parity. + + + '{0}' is not a valid stop bits value. + + + '{0}' is not a supported socket protocol. + + + The gateway didn't acknowledge the supervision mode request. + + + Only passwords containing ASCII digit characters (9 at most) can be used with gateways that don't support digest authentication. + + + Endpoint names cannot contain the '+' or '*' characters. + + \ No newline at end of file diff --git a/src/OpenNetty/OpenNettyScenario.cs b/src/OpenNetty/OpenNettyScenario.cs new file mode 100644 index 0000000..fedc060 --- /dev/null +++ b/src/OpenNetty/OpenNettyScenario.cs @@ -0,0 +1,53 @@ +namespace OpenNetty; + +/// +/// Represents an OpenNetty scenario (Nitoo only). +/// +public sealed class OpenNettyScenario : IEquatable +{ + /// + /// Gets or sets the endpoint name associated with the scenario. + /// + public required string EndpointName { get; init; } + + /// + /// Gets or sets the function code associated with the scenario. + /// + public required ushort FunctionCode { get; init; } + + /// + public bool Equals(OpenNettyScenario? other) + { + if (ReferenceEquals(this, other)) + { + return true; + } + + return other is not null && + string.Equals(EndpointName, other.EndpointName, StringComparison.OrdinalIgnoreCase) && + FunctionCode == other.FunctionCode; + } + + /// + public override bool Equals(object? 
obj) => obj is OpenNettyUnit unit && Equals(unit); + + /// + public override int GetHashCode() => HashCode.Combine(EndpointName, FunctionCode); + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettyScenario? left, OpenNettyScenario? right) + => ReferenceEquals(left, right) || (left is not null && right is not null && left.Equals(right)); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. + public static bool operator !=(OpenNettyScenario? left, OpenNettyScenario? right) => !(left == right); +} diff --git a/src/OpenNetty/OpenNettyService.cs b/src/OpenNetty/OpenNettyService.cs new file mode 100644 index 0000000..7136286 --- /dev/null +++ b/src/OpenNetty/OpenNettyService.cs @@ -0,0 +1,805 @@ +using System.Collections.Immutable; +using System.Reactive.Linq; +using System.Runtime.CompilerServices; +using Microsoft.Extensions.Options; +using Polly; + +namespace OpenNetty; + +/// +/// Represents a low-level service that can be used to send and receive common OpenWebNet messages. +/// +public class OpenNettyService : IOpenNettyService +{ + private readonly OpenNettyLogger _logger; + private readonly IOptionsMonitor _options; + private readonly IOpenNettyPipeline _pipeline; + + /// + /// Creates a new instance of the class. + /// + /// The OpenNetty logger. + /// The OpenNetty options. + /// The OpenNetty pipeline. + public OpenNettyService( + OpenNettyLogger logger, + IOptionsMonitor options, + IOpenNettyPipeline pipeline) + { + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _pipeline = pipeline ?? throw new ArgumentNullException(nameof(pipeline)); + } + + /// + public virtual async IAsyncEnumerable<(OpenNettyAddress Address, ImmutableArray Values)> EnumerateDimensionsAsync( + OpenNettyProtocol protocol, + OpenNettyDimension dimension, + OpenNettyAddress? address = null, + OpenNettyMedia? media = null, + OpenNettyMode? mode = null, + Func>? filter = null, + OpenNettyGateway? gateway = null, + OpenNettyTransmissionOptions options = OpenNettyTransmissionOptions.None, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + if (!Enum.IsDefined(protocol)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0064)); + } + + if (protocol is OpenNettyProtocol.Nitoo) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0030)); + } + + if (gateway is not null && gateway.Protocol != protocol) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0031)); + } + + // If no gateway was explicitly specified, try to resolve it from the options. + gateway ??= _options.CurrentValue.Gateways.Find(gateway => gateway.Protocol == protocol) ?? + throw new InvalidOperationException(SR.GetResourceString(SR.ID0032)); + + var message = OpenNettyMessage.CreateDimensionRequest(protocol, dimension, address, media, mode); + + // Note: acknowledgement validation is deliberately disabled while sending the DIMENSION REQUEST frame + // as it's used by the OWN gateway to indicate when it's done pushing additional DIMENSION READ frames. 
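+        // As an illustration (frame values are indicative only), a dimension request of the form
+        // '*#WHO*WHERE*DIMENSION##' is typically answered by one or more '*#WHO*WHERE*DIMENSION*VAL1*...*VALn##'
+        // DIMENSION READ frames, followed by a final '*#*1##' acknowledgement marking the end of the enumeration.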
+ options |= OpenNettyTransmissionOptions.IgnoreAcknowledgementValidation; + + var context = ResilienceContextPool.Shared.Get(cancellationToken); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyGateway)), gateway); + context.Properties.Set(new ResiliencePropertyKey>(nameof(OpenNettyLogger)), _logger); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyMessage)), message); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyTransmissionOptions)), options); + + // Monitor ACKNOWLEDGEMENT and DIMENSION READ replies received by generic and command sessions. + var notifications = _pipeline.Where(notification => notification.Gateway == gateway) + .SelectMany(async notification => notification switch + { + OpenNettyNotifications.MessageReceived { + Session: { Protocol: OpenNettyProtocol.Scs, Type: OpenNettySessionType.Command } session, + Message: { Type : OpenNettyMessageType.Acknowledgement or + OpenNettyMessageType.NegativeAcknowledgement } message } + when message.Protocol == protocol + => AsyncObservable.Return<(OpenNettySession Session, OpenNettyMessage Message)>((session, message)), + + OpenNettyNotifications.MessageReceived { + Session: { Protocol : OpenNettyProtocol.Scs, Type: OpenNettySessionType.Command } session, + Message: { Type : OpenNettyMessageType.DimensionRead, + Address : not null, + Dimension: not null } message } + // Note: if a filter was not explicitly set, filter out dimensions that don't match the requested one. + when message.Protocol == protocol && (filter is null || await filter(message.Dimension.Value)) + => AsyncObservable.Return<(OpenNettySession Session, OpenNettyMessage Message)>((session, message)), + + OpenNettyNotifications.MessageReceived { + Session: { Protocol: OpenNettyProtocol.Zigbee, Type: OpenNettySessionType.Generic } session, + Message: { Type : OpenNettyMessageType.Acknowledgement or + OpenNettyMessageType.BusyNegativeAcknowledgement or + OpenNettyMessageType.NegativeAcknowledgement } message } + when message.Protocol == protocol + => AsyncObservable.Return<(OpenNettySession Session, OpenNettyMessage Message)>((session, message)), + + OpenNettyNotifications.MessageReceived { + Session: { Protocol : OpenNettyProtocol.Zigbee, Type: OpenNettySessionType.Generic } session, + Message: { Type : OpenNettyMessageType.DimensionRead, + Address : not null, + Dimension: not null } message } + // Note: if a filter was not explicitly set, filter out dimensions that don't match the requested one. + when message.Protocol == protocol && (filter is null || await filter(message.Dimension.Value)) + => AsyncObservable.Return<(OpenNettySession Session, OpenNettyMessage Message)>((session, message)), + + _ => AsyncObservable.Empty<(OpenNettySession Session, OpenNettyMessage Message)>() + }) + .Replay(); + + // Connect the observable before sending the message to ensure + // the notifications are not missed due to a race condition. 
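+        // Note: Replay() returns a connectable observable that only subscribes to the pipeline and starts
+        // buffering notifications once ConnectAsync() is called, so connecting here ensures that replies
+        // received while the request is still being transmitted are replayed when the enumeration below starts.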
+ await using var connection = await notifications.ConnectAsync(); + + OpenNettySession session; + + try + { + session = await gateway.Options.OutgoingMessageResiliencePipeline.ExecuteAsync(async context => + await SendRawMessageAsync(message, gateway, options, context.CancellationToken), context); + } + + finally + { + ResilienceContextPool.Shared.Return(context); + } + + await foreach (var notification in notifications + .Where(notification => notification.Session == session) + .OfType<(OpenNettySession Session, OpenNettyMessage Message), (OpenNettySession Session, OpenNettyMessage Message)?>() + .Timeout(gateway.Options.MultipleDimensionReplyTimeout, AsyncObservable.Return<(OpenNettySession Session, OpenNettyMessage Message)?>(null)) + .ToAsyncEnumerable()) + { + switch (notification?.Message.Type) + { + case null or OpenNettyMessageType.Acknowledgement: + yield break; + + case OpenNettyMessageType.BusyNegativeAcknowledgement: + throw new OpenNettyException(OpenNettyErrorCode.GatewayBusy, SR.GetResourceString(SR.ID0017)); + + case OpenNettyMessageType.NegativeAcknowledgement: + throw new OpenNettyException(OpenNettyErrorCode.InvalidFrame, SR.GetResourceString(SR.ID0018)); + } + + yield return (notification.Value.Message.Address!.Value, notification.Value.Message.Values); + } + } + + /// + public virtual async IAsyncEnumerable<(OpenNettyAddress Address, OpenNettyCommand Command)> EnumerateStatusesAsync( + OpenNettyProtocol protocol, + OpenNettyCategory category, + OpenNettyAddress? address = null, + OpenNettyMedia? media = null, + OpenNettyMode? mode = null, + Func>? filter = null, + OpenNettyGateway? gateway = null, + OpenNettyTransmissionOptions options = OpenNettyTransmissionOptions.None, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + if (!Enum.IsDefined(protocol)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0064)); + } + + if (protocol is OpenNettyProtocol.Nitoo) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0030)); + } + + if (gateway is not null && gateway.Protocol != protocol) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0031)); + } + + // If no gateway was explicitly specified, try to resolve it from the options. + gateway ??= _options.CurrentValue.Gateways.Find(gateway => gateway.Protocol == protocol) ?? + throw new InvalidOperationException(SR.GetResourceString(SR.ID0032)); + + var message = OpenNettyMessage.CreateStatusRequest(protocol, category, address, media, mode); + + // Note: acknowledgement validation is deliberately disabled while sending the STATUS REQUEST frame + // as it's used by the gateway to indicate when it's done pushing additional BUS COMMAND frames. + options |= OpenNettyTransmissionOptions.IgnoreAcknowledgementValidation; + + var context = ResilienceContextPool.Shared.Get(cancellationToken); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyGateway)), gateway); + context.Properties.Set(new ResiliencePropertyKey>(nameof(OpenNettyLogger)), _logger); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyMessage)), message); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyTransmissionOptions)), options); + + // Monitor ACKNOWLEDGEMENT and BUS COMMAND replies received by generic and command sessions. 
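+        // As an illustration (frame values are indicative only), a status request of the form '*#WHO*WHERE##'
+        // is typically answered by one or more '*WHO*WHAT*WHERE##' BUS COMMAND frames describing the current
+        // state of the targeted devices, followed by a final '*#*1##' acknowledgement frame.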
+ var notifications = _pipeline.Where(notification => notification.Gateway == gateway) + .SelectMany(async notification => notification switch + { + OpenNettyNotifications.MessageReceived { + Session: { Protocol: OpenNettyProtocol.Scs, Type: OpenNettySessionType.Command } session, + Message: { Type : OpenNettyMessageType.Acknowledgement or + OpenNettyMessageType.NegativeAcknowledgement } message } + when message.Protocol == protocol + => AsyncObservable.Return<(OpenNettySession Session, OpenNettyMessage Message)>((session, message)), + + OpenNettyNotifications.MessageReceived { + Session: { Protocol: OpenNettyProtocol.Scs, Type: OpenNettySessionType.Command } session, + Message: { Type : OpenNettyMessageType.BusCommand, + Command : not null, + Address : not null } message } + when message.Protocol == protocol && + // Note: if a filter was not explicitly set, filter out commands whose category doesn't match the requested one. + (filter is not null ? await filter(message.Command.Value) : message.Command.Value.Category == category) + => AsyncObservable.Return<(OpenNettySession Session, OpenNettyMessage Message)>((session, message)), + + OpenNettyNotifications.MessageReceived { + Session: { Protocol: OpenNettyProtocol.Zigbee, Type: OpenNettySessionType.Generic } session, + Message: { Type : OpenNettyMessageType.Acknowledgement or + OpenNettyMessageType.BusyNegativeAcknowledgement or + OpenNettyMessageType.NegativeAcknowledgement } message } + when message.Protocol == protocol + => AsyncObservable.Return<(OpenNettySession Session, OpenNettyMessage Message)>((session, message)), + + OpenNettyNotifications.MessageReceived { + Session: { Protocol: OpenNettyProtocol.Zigbee, Type: OpenNettySessionType.Generic } session, + Message: { Type : OpenNettyMessageType.BusCommand, + Command : not null, + Address : not null } message } + when message.Protocol == protocol && + // Note: if a filter was not explicitly set, filter out commands whose category doesn't match the requested one. + (filter is not null ? await filter(message.Command.Value) : message.Command.Value.Category == category) + => AsyncObservable.Return<(OpenNettySession Session, OpenNettyMessage Message)>((session, message)), + + _ => AsyncObservable.Empty<(OpenNettySession Session, OpenNettyMessage Message)>() + }) + .Replay(); + + // Connect the observable before sending the message to ensure + // the notifications are not missed due to a race condition. 
+ await using var connection = await notifications.ConnectAsync(); + + OpenNettySession session; + + try + { + session = await gateway.Options.OutgoingMessageResiliencePipeline.ExecuteAsync(async context => + await SendRawMessageAsync(message, gateway, options, context.CancellationToken), context); + } + + finally + { + ResilienceContextPool.Shared.Return(context); + } + + await foreach (var notification in notifications + .Where(notification => notification.Session == session) + .OfType<(OpenNettySession Session, OpenNettyMessage Message), (OpenNettySession Session, OpenNettyMessage Message)?>() + .Timeout(gateway.Options.MultipleStatusReplyTimeout, AsyncObservable.Return<(OpenNettySession Session, OpenNettyMessage Message)?>(null)) + .ToAsyncEnumerable()) + { + switch (notification?.Message.Type) + { + case null or OpenNettyMessageType.Acknowledgement: + yield break; + + case OpenNettyMessageType.BusyNegativeAcknowledgement: + throw new OpenNettyException(OpenNettyErrorCode.GatewayBusy, SR.GetResourceString(SR.ID0017)); + + case OpenNettyMessageType.NegativeAcknowledgement: + throw new OpenNettyException(OpenNettyErrorCode.InvalidFrame, SR.GetResourceString(SR.ID0018)); + } + + yield return (notification.Value.Message.Address!.Value, notification.Value.Message.Command!.Value); + } + } + + /// + public virtual async ValueTask ExecuteCommandAsync( + OpenNettyProtocol protocol, + OpenNettyCommand command, + OpenNettyAddress? address = null, + OpenNettyMedia? media = null, + OpenNettyMode? mode = null, + OpenNettyGateway? gateway = null, + OpenNettyTransmissionOptions options = OpenNettyTransmissionOptions.None, + CancellationToken cancellationToken = default) + { + if (!Enum.IsDefined(protocol)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0064)); + } + + if (gateway is not null && gateway.Protocol != protocol) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0031)); + } + + // If no gateway was explicitly specified, try to resolve it from the options. + gateway ??= _options.CurrentValue.Gateways.Find(gateway => gateway.Protocol == protocol) ?? + throw new InvalidOperationException(SR.GetResourceString(SR.ID0032)); + + var message = OpenNettyMessage.CreateCommand(protocol, command, address, media, mode); + + var context = ResilienceContextPool.Shared.Get(cancellationToken); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyGateway)), gateway); + context.Properties.Set(new ResiliencePropertyKey>(nameof(OpenNettyLogger)), _logger); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyMessage)), message); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyTransmissionOptions)), options); + + try + { + await gateway.Options.OutgoingMessageResiliencePipeline.ExecuteAsync(async context => + await SendRawMessageAsync(message, gateway, options, context.CancellationToken), context); + } + + finally + { + ResilienceContextPool.Shared.Return(context); + } + } + + /// + public virtual async ValueTask> GetDimensionAsync( + OpenNettyProtocol protocol, + OpenNettyDimension dimension, + OpenNettyAddress? address = null, + OpenNettyMedia? media = null, + OpenNettyMode? mode = null, + Func>? filter = null, + OpenNettyGateway? 
gateway = null, + OpenNettyTransmissionOptions options = OpenNettyTransmissionOptions.None, + CancellationToken cancellationToken = default) + { + if (!Enum.IsDefined(protocol)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0064)); + } + + if (gateway is not null && gateway.Protocol != protocol) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0031)); + } + + // If no gateway was explicitly specified, try to resolve it from the options. + gateway ??= _options.CurrentValue.Gateways.Find(gateway => gateway.Protocol == protocol) ?? + throw new InvalidOperationException(SR.GetResourceString(SR.ID0032)); + + var message = OpenNettyMessage.CreateDimensionRequest(protocol, dimension, address, media, mode); + + var context = ResilienceContextPool.Shared.Get(cancellationToken); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyGateway)), gateway); + context.Properties.Set(new ResiliencePropertyKey>(nameof(OpenNettyLogger)), _logger); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyMessage)), message); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyTransmissionOptions)), options); + + // Monitor ACKNOWLEDGEMENT and DIMENSION READ replies sent by the same address + // as the message was sent to and received by generic and command sessions. + var notifications = _pipeline.Where(notification => notification.Gateway == gateway) + .SelectMany(async notification => notification switch + { + OpenNettyNotifications.MessageReceived { + Session: { Protocol : OpenNettyProtocol.Scs, Type: OpenNettySessionType.Command } session, + Message: { Type : OpenNettyMessageType.DimensionRead, + Dimension: not null } message } + when message.Protocol == protocol && message.Address == address && + // Note: if a filter was not explicitly set, filter out dimensions that don't match the requested one. + (filter is not null ? await filter(message.Dimension.Value) : message.Dimension.Value == dimension) + => AsyncObservable.Return<(OpenNettySession Session, OpenNettyMessage Message)>((session, message)), + + OpenNettyNotifications.MessageReceived { + Session: { Protocol : OpenNettyProtocol.Nitoo or OpenNettyProtocol.Zigbee, Type: OpenNettySessionType.Generic } session, + Message: { Type : OpenNettyMessageType.DimensionRead, + Dimension: not null } message } + when message.Protocol == protocol && message.Address == address && + // Note: if a filter was not explicitly set, filter out dimensions that don't match the requested one. + (filter is not null ? await filter(message.Dimension.Value) : message.Dimension.Value == dimension) + => AsyncObservable.Return<(OpenNettySession Session, OpenNettyMessage Message)>((session, message)), + + _ => AsyncObservable.Empty<(OpenNettySession Session, OpenNettyMessage Message)>() + }) + .Replay(); + + // Connect the observable before sending the message to ensure + // the notifications are not missed due to a race condition. 
+ await using var connection = await notifications.ConnectAsync(); + + try + { + return await gateway.Options.OutgoingMessageResiliencePipeline.ExecuteAsync(async context => + { + var session = await SendRawMessageAsync(message, gateway, options, context.CancellationToken); + + return (await notifications + .FirstOrDefault(notification => notification.Session == session) + .OfType<(OpenNettySession Session, OpenNettyMessage Message), (OpenNettySession Session, OpenNettyMessage Message)?>() + .Timeout(gateway.Options.UniqueDimensionReplyTimeout, AsyncObservable.Return<(OpenNettySession Session, OpenNettyMessage Message)?>(null)) + .RunAsync(cancellationToken))?.Message.Values ?? throw new OpenNettyException( + OpenNettyErrorCode.NoDimensionReceived, SR.GetResourceString(SR.ID0033)); + }, context); + } + + finally + { + ResilienceContextPool.Shared.Return(context); + } + } + + /// + public virtual async ValueTask GetStatusAsync( + OpenNettyProtocol protocol, + OpenNettyCategory category, + OpenNettyAddress? address = null, + OpenNettyMedia? media = null, + OpenNettyMode? mode = null, + Func>? filter = null, + OpenNettyGateway? gateway = null, + OpenNettyTransmissionOptions options = OpenNettyTransmissionOptions.None, + CancellationToken cancellationToken = default) + { + if (!Enum.IsDefined(protocol)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0064)); + } + + if (gateway is not null && gateway.Protocol != protocol) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0031)); + } + + // If no gateway was explicitly specified, try to resolve it from the options. + gateway ??= _options.CurrentValue.Gateways.Find(gateway => gateway.Protocol == protocol) ?? + throw new InvalidOperationException(SR.GetResourceString(SR.ID0032)); + + var message = OpenNettyMessage.CreateStatusRequest(protocol, category, address, media, mode); + + var context = ResilienceContextPool.Shared.Get(cancellationToken); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyGateway)), gateway); + context.Properties.Set(new ResiliencePropertyKey>(nameof(OpenNettyLogger)), _logger); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyMessage)), message); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyTransmissionOptions)), options); + + // Monitor ACKNOWLEDGEMENT and BUS COMMAND replies sent by the same address + // as the message was sent to and received by generic and command sessions. + var notifications = _pipeline.Where(notification => notification.Gateway == gateway) + .SelectMany(async notification => notification switch + { + OpenNettyNotifications.MessageReceived { + Session: { Protocol : OpenNettyProtocol.Scs, Type: OpenNettySessionType.Command } session, + Message: { Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command } message } + when message.Protocol == protocol && message.Address == address && + // Note: if a filter was not explicitly set, filter out commands whose category doesn't match the requested one. + (filter is not null ? 
await filter(command) : command.Category == category) + => AsyncObservable.Return<(OpenNettySession Session, OpenNettyMessage Message)>((session, message)), + + OpenNettyNotifications.MessageReceived { + Session: { Protocol : OpenNettyProtocol.Nitoo or OpenNettyProtocol.Zigbee, Type: OpenNettySessionType.Generic } session, + Message: { Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command } message } + when message.Protocol == protocol && message.Address == address && + // Note: if a filter was not explicitly set, filter out commands whose category doesn't match the requested one. + (filter is not null ? await filter(command) : command.Category == category) + => AsyncObservable.Return<(OpenNettySession Session, OpenNettyMessage Message)>((session, message)), + + _ => AsyncObservable.Empty<(OpenNettySession Session, OpenNettyMessage Message)>() + }) + .Replay(); + + // Connect the observable before sending the message to ensure + // the notifications are not missed due to a race condition. + await using var connection = await notifications.ConnectAsync(); + + try + { + return await gateway.Options.OutgoingMessageResiliencePipeline.ExecuteAsync(async context => + { + var session = await SendRawMessageAsync(message, gateway, options, context.CancellationToken); + + return (await notifications + .FirstOrDefault(notification => notification.Session == session) + .OfType<(OpenNettySession Session, OpenNettyMessage Message), (OpenNettySession Session, OpenNettyMessage Message)?>() + .Timeout(gateway.Options.UniqueStatusReplyTimeout, AsyncObservable.Return<(OpenNettySession Session, OpenNettyMessage Message)?>(null)) + .RunAsync(cancellationToken))?.Message.Command ?? throw new OpenNettyException( + OpenNettyErrorCode.NoStatusReceived, SR.GetResourceString(SR.ID0034)); + }, context); + } + + finally + { + ResilienceContextPool.Shared.Return(context); + } + } + + /// + public virtual IAsyncObservable<(OpenNettyAddress? Address, OpenNettyCommand Command)> ObserveStatusesAsync( + OpenNettyProtocol protocol, + OpenNettyCategory category, + OpenNettyGateway? gateway = null) + { + if (!Enum.IsDefined(protocol)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0064)); + } + + if (gateway is not null && gateway.Protocol != protocol) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0031)); + } + + return _pipeline.Where(notification => gateway is null || notification.Gateway == gateway) + .SelectMany(notification => notification switch + { + OpenNettyNotifications.MessageReceived { + Session: { Protocol: OpenNettyProtocol.Scs, Type: OpenNettySessionType.Command } session, + Message: { Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : OpenNettyAddress address } message } + when message.Protocol == protocol && message.Category == category + => AsyncObservable.Return<(OpenNettyAddress? Address, OpenNettyCommand Command)>((address, command)), + + OpenNettyNotifications.MessageReceived { + Session: { Protocol: OpenNettyProtocol.Nitoo or OpenNettyProtocol.Zigbee, Type: OpenNettySessionType.Generic } session, + Message: { Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : OpenNettyAddress address } message } + when message.Protocol == protocol && message.Category == category + => AsyncObservable.Return<(OpenNettyAddress? Address, OpenNettyCommand Command)>((address, command)), + + _ => AsyncObservable.Empty<(OpenNettyAddress? 
Address, OpenNettyCommand Command)>() + }) + .Retry(); + } + + /// + public virtual IAsyncObservable<(OpenNettyAddress? Address, OpenNettyDimension Dimension, ImmutableArray Values)> ObserveDimensionsAsync( + OpenNettyProtocol protocol, + OpenNettyCategory category, + OpenNettyGateway? gateway = null) + { + if (!Enum.IsDefined(protocol)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0064)); + } + + if (gateway is not null && gateway.Protocol != protocol) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0031)); + } + + return _pipeline.Where(notification => gateway is null || notification.Gateway == gateway) + .SelectMany(notification => notification switch + { + OpenNettyNotifications.MessageReceived { + Session: { Protocol : OpenNettyProtocol.Scs, Type: OpenNettySessionType.Command } session, + Message: { Type : OpenNettyMessageType.DimensionRead, + Address : OpenNettyAddress address, + Dimension: OpenNettyDimension dimension, + Values : [..] values } message } + when message.Protocol == protocol && message.Category == category + => AsyncObservable.Return<(OpenNettyAddress? Address, OpenNettyDimension Dimension, ImmutableArray Values)>((address, dimension, values)), + + OpenNettyNotifications.MessageReceived { + Session: { Protocol : OpenNettyProtocol.Nitoo or OpenNettyProtocol.Zigbee, Type: OpenNettySessionType.Generic } session, + Message: { Type : OpenNettyMessageType.DimensionRead, + Address : OpenNettyAddress address, + Dimension: OpenNettyDimension dimension, + Values : [..] values } message } + when message.Protocol == protocol && message.Category == category + => AsyncObservable.Return<(OpenNettyAddress? Address, OpenNettyDimension Dimension, ImmutableArray Values)>((address, dimension, values)), + + _ => AsyncObservable.Empty<(OpenNettyAddress? Address, OpenNettyDimension Dimension, ImmutableArray Values)>() + }) + .Retry(); + } + + /// + public virtual IAsyncObservable ObserveEventsAsync( + OpenNettyProtocol protocol, + OpenNettyGateway? gateway = null) + { + if (!Enum.IsDefined(protocol)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0064)); + } + + if (gateway is not null && gateway.Protocol != protocol) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0031)); + } + + return _pipeline.Where(notification => gateway is null || notification.Gateway == gateway) + .SelectMany(notification => notification switch + { + OpenNettyNotifications.MessageReceived { + Session: { Protocol : OpenNettyProtocol.Scs, Type: OpenNettySessionType.Command } session, + Message: OpenNettyMessage message } + when message.Protocol == protocol => AsyncObservable.Return(message), + + OpenNettyNotifications.MessageReceived { + Session: { Protocol : OpenNettyProtocol.Nitoo or OpenNettyProtocol.Zigbee, Type: OpenNettySessionType.Generic } session, + Message: OpenNettyMessage message } + when message.Protocol == protocol && + message.Type is not (OpenNettyMessageType.Acknowledgement or + OpenNettyMessageType.BusyNegativeAcknowledgement or + OpenNettyMessageType.NegativeAcknowledgement) + => AsyncObservable.Return(message), + + _ => AsyncObservable.Empty() + }) + .Retry(); + } + + /// + public virtual async ValueTask SendMessageAsync( + OpenNettyMessage message, + OpenNettyGateway? 
gateway = null, + OpenNettyTransmissionOptions options = OpenNettyTransmissionOptions.None, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(message); + + if (gateway is not null && gateway.Protocol != message.Protocol) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0031)); + } + + // If no gateway was explicitly specified, try to resolve it from the options. + gateway ??= _options.CurrentValue.Gateways.Find(gateway => gateway.Protocol == message.Protocol) ?? + throw new InvalidOperationException(SR.GetResourceString(SR.ID0032)); + + var context = ResilienceContextPool.Shared.Get(cancellationToken); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyGateway)), gateway); + context.Properties.Set(new ResiliencePropertyKey>(nameof(OpenNettyLogger)), _logger); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyMessage)), message); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyTransmissionOptions)), options); + + try + { + await gateway.Options.OutgoingMessageResiliencePipeline.ExecuteAsync(async context => + await SendRawMessageAsync(message, gateway, options, context.CancellationToken), context); + } + + finally + { + ResilienceContextPool.Shared.Return(context); + } + } + + /// + public virtual async ValueTask SetDimensionAsync( + OpenNettyProtocol protocol, + OpenNettyDimension dimension, + ImmutableArray values, + OpenNettyAddress? address = null, + OpenNettyMedia? media = null, + OpenNettyMode? mode = null, + OpenNettyGateway? gateway = null, + OpenNettyTransmissionOptions options = OpenNettyTransmissionOptions.None, + CancellationToken cancellationToken = default) + { + if (!Enum.IsDefined(protocol)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0064)); + } + + if (gateway is not null && gateway.Protocol != protocol) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0031)); + } + + // If no gateway was explicitly specified, try to resolve it from the options. + gateway ??= _options.CurrentValue.Gateways.Find(gateway => gateway.Protocol == protocol) ?? + throw new InvalidOperationException(SR.GetResourceString(SR.ID0032)); + + var message = OpenNettyMessage.CreateDimensionSet(protocol, dimension, values, address, media, mode); + + var context = ResilienceContextPool.Shared.Get(cancellationToken); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyGateway)), gateway); + context.Properties.Set(new ResiliencePropertyKey>(nameof(OpenNettyLogger)), _logger); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyMessage)), message); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyTransmissionOptions)), options); + + try + { + await gateway.Options.OutgoingMessageResiliencePipeline.ExecuteAsync(async context => + await SendRawMessageAsync(message, gateway, options, context.CancellationToken), context); + } + + finally + { + ResilienceContextPool.Shared.Return(context); + } + } + + /// + /// Sends a raw OpenNetty message and waits until it is processed by a worker. + /// + /// The message. + /// The gateway used to send the message. + /// The transmission options to use. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the asynchronous operation + /// and whose result returns the session used by the worker to send the message to the gateway. + /// + /// An error occurred while processing the message. 
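+    /// Note: this method doesn't write the frame itself: it publishes a "message ready" notification on the
+    /// shared pipeline and waits for a worker to report back - using the same transaction identifier - whether
+    /// the message was sent or why it was rejected.
+    /// 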
+ protected virtual async ValueTask SendRawMessageAsync( + OpenNettyMessage message, + OpenNettyGateway gateway, + OpenNettyTransmissionOptions options, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(message); + ArgumentNullException.ThrowIfNull(gateway); + + switch (message) + { + // Note: Zigbee gateways are affected by a bug that affects how STATUS REQUEST frames are handled + // and prevents a proper acknowledgement frame from being returned. To ensure the session is not + // blocked until the timeout is reached when sending STATUS REQUEST frames, acknowledgement validation + // is deliberately disabled: in this case, the requests are assumed to be accepted by the gateway. + case { Protocol: OpenNettyProtocol.Zigbee, Type: OpenNettyMessageType.StatusRequest }: + + // Note: Nitoo gateways don't return acknowledgement frames for these specific dimensions: + case { Protocol : OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.DimensionRequest, + Address : null, + Dimension: OpenNettyDimension dimension } + when dimension == OpenNettyDimensions.Management.FirmwareVersion || + dimension == OpenNettyDimensions.Management.HardwareVersion || + dimension == OpenNettyDimensions.Management.DeviceIdentifier: + options |= OpenNettyTransmissionOptions.IgnoreAcknowledgementValidation; + break; + } + + // Create a new transaction that will be used to correlate the + // incoming notifications with the message that is being sent. + var transaction = OpenNettyTransaction.Create(); + + var notifications = _pipeline + .Where(notification => notification.Gateway == gateway) + .Where(notification => notification switch + { + OpenNettyNotifications.GatewayBusy value => value.Transaction == transaction, + OpenNettyNotifications.InvalidAction value => value.Transaction == transaction, + OpenNettyNotifications.InvalidFrame value => value.Transaction == transaction, + OpenNettyNotifications.MessageSent value => value.Transaction == transaction, + OpenNettyNotifications.NoAcknowledgmentReceived value => value.Transaction == transaction, + OpenNettyNotifications.NoActionReceived value => value.Transaction == transaction, + + _ => false + }) + .Replay(); + + // Connect the observable before sending the message to ensure + // the notifications are not missed due to a race condition. + await using var connection = await notifications.ConnectAsync(); + + // Inform the workers that a frame needs to be processed. + var notification = new OpenNettyNotifications.MessageReady + { + Gateway = gateway, + Message = message, + Options = options, + Transaction = transaction + }; + + await _pipeline.PublishAsync(notification, cancellationToken); + + // Retrieve the notification indicating whether the session acknowledged or rejected the message. + // If no notification is received, assume the message couldn't be processed by a worker. 
+ switch (await notifications + .FirstOrDefault() + .Timeout(gateway.Options.OutgoingMessageProcessingTimeout, AsyncObservable.Return(default(OpenNettyNotification))) + .RunAsync(cancellationToken)) + { + case OpenNettyNotifications.MessageSent { Session: OpenNettySession session }: + return session; + + case OpenNettyNotifications.GatewayBusy: + throw new OpenNettyException(OpenNettyErrorCode.GatewayBusy, SR.GetResourceString(SR.ID0017)); + + case OpenNettyNotifications.InvalidAction: + throw new OpenNettyException(OpenNettyErrorCode.InvalidAction, SR.GetResourceString(SR.ID0035)); + + case OpenNettyNotifications.InvalidFrame: + throw new OpenNettyException(OpenNettyErrorCode.InvalidFrame, SR.GetResourceString(SR.ID0018)); + + case OpenNettyNotifications.NoAcknowledgmentReceived: + throw new OpenNettyException(OpenNettyErrorCode.NoAcknowledgementReceived, SR.GetResourceString(SR.ID0016)); + + case OpenNettyNotifications.NoActionReceived: + throw new OpenNettyException(OpenNettyErrorCode.NoActionReceived, SR.GetResourceString(SR.ID0036)); + + case null or _: + throw new OpenNettyException(OpenNettyErrorCode.NoWorkerAvailable, SR.GetResourceString(SR.ID0037)); + } + } +} + diff --git a/src/OpenNetty/OpenNettySession.cs b/src/OpenNetty/OpenNettySession.cs new file mode 100644 index 0000000..522f9a0 --- /dev/null +++ b/src/OpenNetty/OpenNettySession.cs @@ -0,0 +1,675 @@ +using System.Globalization; +using System.Reactive.Concurrency; +using System.Reactive.Linq; +using System.Reactive.Subjects; +using System.Runtime.InteropServices; +using System.Security.Cryptography; +using System.Text; + +namespace OpenNetty; + +/// +/// Represents an observable session to an OpenWebNet gateway. +/// +public sealed class OpenNettySession : IConnectableAsyncObservable, IEquatable, IAsyncDisposable +{ + private OpenNettyConnection? _connection; + private readonly OpenNettyGateway _gateway; + private readonly IConnectableAsyncObservable _observable; + private readonly SemaphoreSlim _semaphore = new(initialCount: 1, maxCount: 1); + private readonly CancellationTokenSource _source = new(); + private readonly OpenNettySessionType _type; + + /// + /// Creates a new instance of the class. + /// + /// The gateway. + /// The session type. + /// The connection. + private OpenNettySession( + OpenNettyGateway gateway, + OpenNettySessionType type, + OpenNettyConnection connection) + { + ArgumentNullException.ThrowIfNull(gateway); + ArgumentNullException.ThrowIfNull(connection); + + _connection = connection; + _gateway = gateway; + _observable = AsyncObservable.Create(observer => + { + return TaskPoolAsyncScheduler.Default.ScheduleAsync(async cancellationToken => + { + using var source = CancellationTokenSource.CreateLinkedTokenSource(_source.Token, cancellationToken); + + while (!source.Token.IsCancellationRequested) + { + OpenNettyFrame? 
frame; + + try + { + frame = await _connection.ReceiveAsync(source.Token); + } + + catch (OperationCanceledException) when (source.Token.IsCancellationRequested) + { + await observer.OnCompletedAsync(); + return; + } + + catch (Exception exception) + { + await observer.OnErrorAsync(exception); + continue; + } + + if (frame is null) + { + await observer.OnCompletedAsync(); + return; + } + + OpenNettyMessage message; + + try + { + message = OpenNettyMessage.CreateFromFrame(gateway.Protocol, frame.GetValueOrDefault()); + } + + catch (Exception exception) + { + await observer.OnErrorAsync(exception); + continue; + } + + await observer.OnNextAsync(message); + } + }); + }) + .Retry() + .Multicast(new ConcurrentSimpleAsyncSubject()); + + _type = type; + } + + /// + /// Gets the gateway used by this session. + /// + public OpenNettyGateway Gateway => _gateway; + + /// + /// Gets the protocol used by this session. + /// + public OpenNettyProtocol Protocol => _gateway.Protocol; + + /// + /// Gets the type of session negotiated with the gateway. + /// + public OpenNettySessionType Type => _type; + + /// + /// Gets the unique identifier associated to the current session. + /// + public Guid Id { get; } = Guid.NewGuid(); + + /// + /// Sends the specified message to the gateway. + /// + /// The message. + /// The transmission options. + /// The that can be used to abort the operation. + /// Note: concurrent calls to this API are not allowed. + /// A that can be used to monitor the asynchronous operation. + /// Invalid transmission options are specified. + /// The session is disposed. + /// An error occurred while sending the message. + public async ValueTask SendAsync( + OpenNettyMessage message, + OpenNettyTransmissionOptions options = default, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(message, nameof(message)); + + if (_connection is not OpenNettyConnection connection) + { + throw new ObjectDisposedException(SR.GetResourceString(SR.ID0009)); + } + + if (message.Protocol != _gateway.Protocol) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0010)); + } + + var address = message.Address; + + if (options.HasFlag(OpenNettyTransmissionOptions.RequireActionValidation)) + { + if (message.Protocol is not OpenNettyProtocol.Nitoo) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0011)); + } + + if (message.Type is not (OpenNettyMessageType.BusCommand or OpenNettyMessageType.DimensionSet)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0012)); + } + + if (message.Mode is OpenNettyMode.Broadcast or OpenNettyMode.Multicast) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0013)); + } + + if (address is null) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0014)); + } + } + + if (!await _semaphore.WaitAsync(TimeSpan.Zero, cancellationToken)) + { + throw new InvalidOperationException(SR.GetResourceString(SR.ID0015)); + } + + try + { + var messages = _observable.ObserveOn(TaskPoolAsyncScheduler.Default) + .Where(message => message switch + { + { Protocol: OpenNettyProtocol.Nitoo or OpenNettyProtocol.Scs, + Type : OpenNettyMessageType.Acknowledgement or OpenNettyMessageType.NegativeAcknowledgement } + when !options.HasFlag(OpenNettyTransmissionOptions.IgnoreAcknowledgementValidation) => true, + + { Protocol: OpenNettyProtocol.Zigbee, + Type : OpenNettyMessageType.Acknowledgement or + OpenNettyMessageType.BusyNegativeAcknowledgement or + 
OpenNettyMessageType.NegativeAcknowledgement } + when !options.HasFlag(OpenNettyTransmissionOptions.IgnoreAcknowledgementValidation) => true, + + { Protocol: OpenNettyProtocol.Nitoo, + Type : OpenNettyMessageType.BusCommand, + Command : OpenNettyCommand command, + Address : OpenNettyAddress } + when options.HasFlag(OpenNettyTransmissionOptions.RequireActionValidation) && + (command == OpenNettyCommands.Diagnostics.ValidAction || + command == OpenNettyCommands.Diagnostics.InvalidAction) && + message.Address == address => true, + + _ => false + }) + .Replay(); + + // Connect the observable just before sending the frame to ensure the acknowledgement + // and validation replies, if applicable, are not missed due to a race condition. + await using (await messages.ConnectAsync()) + { + await connection.SendAsync(message.Frame, cancellationToken); + + if (!options.HasFlag(OpenNettyTransmissionOptions.IgnoreAcknowledgementValidation)) + { + switch (await messages + .FirstOrDefault(static message => message.Type is OpenNettyMessageType.Acknowledgement or + OpenNettyMessageType.BusyNegativeAcknowledgement or + OpenNettyMessageType.NegativeAcknowledgement) + .Timeout(_gateway.Options.FrameAcknowledgementTimeout, AsyncObservable.Return(null)) + .RunAsync(cancellationToken)) + { + case null: + throw new OpenNettyException(OpenNettyErrorCode.NoAcknowledgementReceived, SR.GetResourceString(SR.ID0016)); + + case { Type: OpenNettyMessageType.BusyNegativeAcknowledgement }: + throw new OpenNettyException(OpenNettyErrorCode.GatewayBusy, SR.GetResourceString(SR.ID0017)); + + case { Type: OpenNettyMessageType.NegativeAcknowledgement }: + throw new OpenNettyException(OpenNettyErrorCode.InvalidFrame, SR.GetResourceString(SR.ID0018)); + } + } + + if (options.HasFlag(OpenNettyTransmissionOptions.RequireActionValidation)) + { + switch (await messages + .FirstOrDefault(static message => + message.Type is OpenNettyMessageType.BusCommand && + (message.Command == OpenNettyCommands.Diagnostics.ValidAction || + message.Command == OpenNettyCommands.Diagnostics.InvalidAction)) + .Timeout(_gateway.Options.ActionValidationTimeout, AsyncObservable.Return(null)) + .RunAsync(cancellationToken)) + { + case null: + throw new OpenNettyException(OpenNettyErrorCode.NoActionReceived, SR.GetResourceString(SR.ID0019)); + + case { Command: OpenNettyCommand command } when command == OpenNettyCommands.Diagnostics.InvalidAction: + throw new OpenNettyException(OpenNettyErrorCode.InvalidFrame, SR.GetResourceString(SR.ID0018)); + } + } + } + + // If the gateway options indicate that a post-sending delay must be enforced, apply it immediately. + if (_gateway.Options.PostSendingDelay != TimeSpan.Zero && + !options.HasFlag(OpenNettyTransmissionOptions.DisablePostSendingDelay)) + { + await Task.Delay(_gateway.Options.PostSendingDelay, cancellationToken); + } + } + + finally + { + _semaphore.Release(); + } + } + + /// + /// Creates and initializes a new session to the specified gateway. + /// + /// The gateway. + /// The session type. + /// The that can be used to abort the operation. + /// + /// A that can be used to monitor the + /// asynchronous operation and whose result returns the created session. + /// + /// The gateway is . + /// The session type is invalid. + /// An error occurred while establishing the session. 
+ public static async ValueTask CreateAsync( + OpenNettyGateway gateway, OpenNettySessionType type, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(gateway); + + switch (type) + { + case not (OpenNettySessionType.Command or OpenNettySessionType.Generic or OpenNettySessionType.Event): + throw new ArgumentOutOfRangeException(nameof(type), SR.GetResourceString(SR.ID0020)); + + case OpenNettySessionType.Command when !gateway.Device.Definition.Capabilities.Contains(OpenNettyCapabilities.OpenWebNetCommandSession): + case OpenNettySessionType.Generic when !gateway.Device.Definition.Capabilities.Contains(OpenNettyCapabilities.OpenWebNetGenericSession): + case OpenNettySessionType.Event when !gateway.Device.Definition.Capabilities.Contains(OpenNettyCapabilities.OpenWebNetEventSession): + throw new InvalidOperationException(SR.GetResourceString(SR.ID0021)); + } + + using var source = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken); + + if (gateway.Options.ConnectionNegotiationTimeout != Timeout.InfiniteTimeSpan) + { + source.CancelAfter(gateway.Options.ConnectionNegotiationTimeout); + } + + // Create a new connection that will be managed by the returned object. + var connection = await OpenNettyConnection.CreateAsync(gateway, source.Token); + + try + { + if (type is OpenNettySessionType.Generic) + { + // If the supervision mode was enabled, enforce it when creating the session to ensure the + // connection is working properly and receive all the state changes sent by the devices. + if (gateway.Options.EnableSupervisionMode) + { + await connection.SendAsync(new OpenNettyFrame( + new OpenNettyField(new OpenNettyParameter("13")), + new OpenNettyField(new OpenNettyParameter("66")), + new OpenNettyField(OpenNettyParameter.Empty)), source.Token); + + // Ensure the server acknowledged the supervision mode request. + if (await connection.ReceiveAsync(source.Token) != OpenNettyFrames.Acknowledgement) + { + throw new OpenNettyException(OpenNettyErrorCode.InvalidFrame, SR.GetResourceString(SR.ID0109)); + } + } + + // Otherwise, ask the gateway to return its firmware version to ensure the connection is working properly. + else + { + await connection.SendAsync(new OpenNettyFrame( + new OpenNettyField(OpenNettyParameter.Empty, new OpenNettyParameter("13")), + new OpenNettyField(OpenNettyParameter.Empty), + new OpenNettyField(new OpenNettyParameter("16"))), source.Token); + + // Note: Nitoo gateways don't return acknowledgement frames for firmware version requests. + if (gateway.Protocol is OpenNettyProtocol.Nitoo) + { + if (await connection.ReceiveAsync(source.Token) is not + { Fields: [{ Parameters: [{ IsEmpty: true }, { Value: "13" }] }, + { Parameters: [{ IsEmpty: true }] }, + { Parameters: [{ Value: "16" }] }, + { Parameters: [{ Value.Length: > 0 }] }, + { Parameters: [{ Value.Length: > 0 }] }, + { Parameters: [{ Value.Length: > 0 }] }] }) + { + throw new OpenNettyException(OpenNettyErrorCode.InvalidFrame, SR.GetResourceString(SR.ID0022)); + } + } + + else + { + // Note: the acknowledgement frame may be returned before or after the firmware version. 
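+                        // For instance (illustrative values only), the gateway may return '*#*1##' followed by a
+                        // '*#13**16*<version>*<release>*<build>##' firmware frame, or send the firmware frame first
+                        // and the '*#*1##' acknowledgement afterwards; both orderings are handled below.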
+ var frame = await connection.ReceiveAsync(source.Token); + if (frame == OpenNettyFrames.Acknowledgement) + { + if (await connection.ReceiveAsync(source.Token) is not + { Fields: [{ Parameters: [{ IsEmpty: true }, { Value: "13" }] }, + { Parameters: [{ IsEmpty: true }] }, + { Parameters: [{ Value: "16" }] }, + { Parameters: [{ Value.Length: > 0 }] }, + { Parameters: [{ Value.Length: > 0 }] }, + { Parameters: [{ Value.Length: > 0 }] }] }) + { + throw new OpenNettyException(OpenNettyErrorCode.InvalidFrame, SR.GetResourceString(SR.ID0022)); + } + } + + else if (frame is { Fields: [{ Parameters: [{ IsEmpty: true }, { Value: "13" }] }, + { Parameters: [{ IsEmpty: true }] }, + { Parameters: [{ Value: "16" }] }, + { Parameters: [{ Value.Length: > 0 }] }, + { Parameters: [{ Value.Length: > 0 }] }, + { Parameters: [{ Value.Length: > 0 }] }] }) + { + if (await connection.ReceiveAsync(source.Token) != OpenNettyFrames.Acknowledgement) + { + throw new OpenNettyException(OpenNettyErrorCode.InvalidFrame, SR.GetResourceString(SR.ID0022)); + } + } + + else + { + throw new OpenNettyException(OpenNettyErrorCode.InvalidFrame, SR.GetResourceString(SR.ID0022)); + } + } + } + } + + else + { + // Ensure the server acknowledged the connection request. + if (await connection.ReceiveAsync(source.Token) != OpenNettyFrames.Acknowledgement) + { + throw new OpenNettyException(OpenNettyErrorCode.InvalidFrame, SR.GetResourceString(SR.ID0022)); + } + + // Negotiate the requested session type. + await connection.SendAsync(new OpenNettyFrame( + new OpenNettyField(new OpenNettyParameter("99")), + new OpenNettyField(new OpenNettyParameter(type switch + { + OpenNettySessionType.Command => "9", + OpenNettySessionType.Event => "1", + _ => "0" + }))), source.Token); + + switch (await connection.ReceiveAsync(source.Token)) + { + // If the client IP address was whitelisted, authentication is not required and + // an ACK frame is directly returned by the OpenWebNet gateway to reflect that. + case OpenNettyFrame frame when frame == OpenNettyFrames.Acknowledgement: + break; + + // If the client IP address wasn't whitelisted and the server requires using "HMAC authentication" (that + // isn't based on the standard HMAC-SHA1 or HMAC-SHA256 algorithms but is actually a variant of digest + // authentication), extract the returned method algorithm and authenticate using SHA1 or SHA256 digests. + case { Fields: [{ Parameters: [{ Value: "98" }] }, { Parameters: [{ Value: { Length: > 0 } method }] }] }: + { + using HashAlgorithm algorithm = method switch + { + "1" => SHA1.Create(), + "2" => SHA256.Create(), + _ => throw new OpenNettyException(OpenNettyErrorCode.AuthenticationMethodUnsupported, SR.GetResourceString(SR.ID0023)) + }; + + // Ensure a password was attached to the gateway instance. + if (string.IsNullOrEmpty(gateway.Password)) + { + throw new OpenNettyException(OpenNettyErrorCode.AuthenticationRequired, SR.GetResourceString(SR.ID0024)); + } + + // Acknowledge the negotiated authentication algorithm. + await connection.SendAsync(OpenNettyFrames.Acknowledgement, source.Token); + + // Extract the server authentication nonce returned by the gateway. + if (await connection.ReceiveAsync(source.Token) is not { Fields: [{ Parameters: [{ IsEmpty: true }, { Value: { Length: > 0 } nonce }] }] }) + { + throw new OpenNettyException(OpenNettyErrorCode.InvalidFrame, SR.GetResourceString(SR.ID0025)); + } + + // Ensure the returned nonce has a correct size and generate a random client nonce using a CSP. 
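+                        // The rest of the handshake follows the scheme implemented below: the client digest is
+                        // HASH(server_nonce || client_nonce || "736F70653E" || "636F70653E" || HASH(password)) and the
+                        // expected server digest is HASH(server_nonce || client_nonce || HASH(password)), where the nonces
+                        // are concatenated in lowercase hexadecimal form and digests are transmitted as pairs of decimal
+                        // digits (one pair per hexadecimal character).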
+ var parameters = ( + ServerNonce: ConvertFromDigits(nonce) switch + { + { Length: int length } result when length * 4 == algorithm.HashSize => result, + + _ => throw new OpenNettyException(OpenNettyErrorCode.InvalidFrame, SR.GetResourceString(SR.ID0025)), + }, + ClientNonce: RandomNumberGenerator.GetBytes(algorithm.HashSize / 8)); + + // Compute the hash of the OPEN password and convert it to its lowercase hexadecimal representation. + var password = Convert.ToHexString(algorithm.ComputeHash(Encoding.UTF8.GetBytes(gateway.Password))).ToLowerInvariant(); + + // Compute and send the digest used to authenticate the client. + await connection.SendAsync(new OpenNettyFrame( + new OpenNettyField(OpenNettyParameter.Empty, new OpenNettyParameter(ConvertToDigits(parameters.ClientNonce))), + new OpenNettyField(new OpenNettyParameter(ConvertToDigits(algorithm.ComputeHash(Encoding.UTF8.GetBytes(new StringBuilder() + .Append(parameters.ServerNonce) + .Append(Convert.ToHexString(parameters.ClientNonce).ToLowerInvariant()) + .Append("736F70653E") + .Append("636F70653E") + .Append(password) + .ToString())))))), source.Token); + + // Extract the server authentication digest returned by the gateway + // and validate it to ensure it matches the expected value. + switch (await connection.ReceiveAsync(source.Token)) + { + case { Fields: [{ Parameters: [{ IsEmpty: true }, { Value: { Length: > 0 } digest }] }] } + when CryptographicOperations.FixedTimeEquals( + left : MemoryMarshal.AsBytes(digest), + right: MemoryMarshal.AsBytes(ConvertToDigits(algorithm.ComputeHash(Encoding.UTF8.GetBytes(new StringBuilder() + .Append(parameters.ServerNonce) + .Append(Convert.ToHexString(parameters.ClientNonce).ToLowerInvariant()) + .Append(password) + .ToString()))))): + // Acknowledge the negotiated authentication data. + await connection.SendAsync(OpenNettyFrames.Acknowledgement, source.Token); + break; + + case null: + case OpenNettyFrame frame when frame == OpenNettyFrames.NegativeAcknowledgement: + throw new OpenNettyException(OpenNettyErrorCode.AuthenticationInvalid, SR.GetResourceString(SR.ID0026)); + + default: throw new OpenNettyException(OpenNettyErrorCode.InvalidFrame, SR.GetResourceString(SR.ID0027)); + } + break; + } + + // If the client IP address wasn't whitelisted and the gateway requires using the legacy + // "OPEN authentication" method, extract the nonce and authenticate using the password. + case { Fields: [{ Parameters: [{ IsEmpty: true }, { Value: { Length: > 0 } nonce }] }] }: + { + // Ensure a password was attached to the gateway instance. + if (string.IsNullOrEmpty(gateway.Password)) + { + throw new OpenNettyException(OpenNettyErrorCode.AuthenticationRequired, SR.GetResourceString(SR.ID0024)); + } + + // Ensure the password only includes at most 9 ASCII digit characters as non-digit + // characters are not supported when using the legacy authentication method. + if (gateway.Password.Any(static character => !char.IsAsciiDigit(character)) || + gateway.Password.Length > 9 || + !uint.TryParse(gateway.Password, CultureInfo.InvariantCulture, out uint password)) + { + throw new OpenNettyException(OpenNettyErrorCode.AuthenticationInvalid, SR.GetResourceString(SR.ID0110)); + } + + // Compute and send the obfuscated password used to authenticate the client. 
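+ // Note: with the legacy OPEN algorithm, each digit of the server nonce selects a bitwise
+ // transformation (mostly 32-bit rotations) applied to the numeric password; digits without a
+ // matching transformation leave the value unchanged and the final result is sent back as its
+ // decimal representation.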
+ await connection.SendAsync(new OpenNettyFrame( + new OpenNettyField( + OpenNettyParameter.Empty, + new OpenNettyParameter(nonce.Aggregate(password, static (password, character) => character switch + { + '1' => (password >> 7) | (password << 25), + '2' => (password >> 4) | (password << 28), + '3' => (password >> 3) | (password << 29), + '4' => (password << 1) | (password >> 31), + '5' => (password << 5) | (password >> 27), + '6' => (password << 12) | (password >> 20), + '7' => (password & 0x0000FF00) | (password << 24) | (password & 0x00FF0000) >> 16 | (password & 0xFF000000) >> 8, + '8' => (password << 16) | (password >> 24) | ((password & 0x00FF0000) >> 8), + '9' => ~password, + _ => password + }).ToString(CultureInfo.InvariantCulture)))), source.Token); + + // Ensure the server acknowledged the authentication demand. + switch (await connection.ReceiveAsync(source.Token)) + { + case OpenNettyFrame frame when frame == OpenNettyFrames.Acknowledgement: + break; + + case null: + case OpenNettyFrame frame when frame == OpenNettyFrames.NegativeAcknowledgement: + throw new OpenNettyException(OpenNettyErrorCode.AuthenticationInvalid, SR.GetResourceString(SR.ID0026)); + + default: throw new OpenNettyException(OpenNettyErrorCode.InvalidFrame, SR.GetResourceString(SR.ID0027)); + } + break; + } + + default: throw new OpenNettyException(OpenNettyErrorCode.InvalidFrame, SR.GetResourceString(SR.ID0022)); + } + } + + return new OpenNettySession(gateway, type, connection); + } + + catch (OperationCanceledException) when (!cancellationToken.IsCancellationRequested) + { + await connection.DisposeAsync(); + + throw new OpenNettyException(OpenNettyErrorCode.NegotiationTimeout, SR.GetResourceString(SR.ID0028)); + } + + catch (Exception) + { + await connection.DisposeAsync(); + + throw; + } + + static string ConvertFromDigits(ReadOnlySpan value) + { + if (value.Length % 4 is not 0) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0029), nameof(value)); + } + + var builder = new StringBuilder(); + + for (var index = 0; index < value.Length; index += 2) + { + if (!int.TryParse(value.Slice(index, 2), CultureInfo.InvariantCulture, out int result)) + { + throw new ArgumentException(SR.GetResourceString(SR.ID0029), nameof(value)); + } + + builder.Append(result.ToString("x", CultureInfo.InvariantCulture)); + } + + return builder.ToString(); + } + + static string ConvertToDigits(ReadOnlySpan value) + { + var builder = new StringBuilder(); + + var span = Convert.ToHexString(value).AsSpan(); + + for (var index = 0; index < span.Length; index++) + { + var digit = int.Parse(span.Slice(index, 1), NumberStyles.HexNumber, CultureInfo.InvariantCulture); + builder.Append(digit.ToString("00", CultureInfo.InvariantCulture)); + } + + return builder.ToString(); + } + } + + /// + /// Connects the so that incoming + /// frames can start being processed by the registered observers. + /// + /// + /// A that can be used to monitor the + /// asynchronous operation and whose result is used as a signal by the caller + /// to inform the session that no additional frame will be processed. + /// + public ValueTask ConnectAsync() => _observable.ConnectAsync(); + + /// + /// Subscribes to incoming frames. + /// + /// + /// A that can be used to monitor the + /// asynchronous operation and whose result is used as a signal by the caller + /// to inform the session that no additional frame will be processed. 
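+ /// Note: observer notifications are marshalled to the task pool scheduler, so observer callbacks do not run directly on the connection's receive loop.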
+ /// + public ValueTask SubscribeAsync(IAsyncObserver observer) + => _observable.ObserveOn(TaskPoolAsyncScheduler.Default).SubscribeAsync(observer); + + /// + /// Releases the session. + /// + /// A that can be used to monitor the asynchronous operation. + public async ValueTask DisposeAsync() + { + if (Interlocked.Exchange(ref _connection, null) is OpenNettyConnection connection) + { + await connection.DisposeAsync(); + + _semaphore.Dispose(); + _source.Cancel(); + _source.Dispose(); + } + } + + /// + public bool Equals(OpenNettySession? other) + { + if (ReferenceEquals(this, other)) + { + return true; + } + + if (other is null) + { + return false; + } + + return Id == other.Id; + } + + /// + public override bool Equals(object? obj) => obj is OpenNettySession session && Equals(session); + + /// + public override int GetHashCode() => Id.GetHashCode(); + + /// + /// Computes the representation of the current session. + /// + /// The representation of the current session. + public override string ToString() => Id.ToString(); + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettySession? left, OpenNettySession? right) + => ReferenceEquals(left, right) || (left is not null && right is not null && left.Equals(right)); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. + public static bool operator !=(OpenNettySession? left, OpenNettySession? right) => !(left == right); +} diff --git a/src/OpenNetty/OpenNettySessionType.cs b/src/OpenNetty/OpenNettySessionType.cs new file mode 100644 index 0000000..6162578 --- /dev/null +++ b/src/OpenNetty/OpenNettySessionType.cs @@ -0,0 +1,22 @@ +namespace OpenNetty; + +/// +/// Represents the type of an OpenNetty session. +/// +public enum OpenNettySessionType +{ + /// + /// The session doesn't have a specific type. + /// + Generic = 0, + + /// + /// The session is a command session. + /// + Command = 1, + + /// + /// The session is an event session. + /// + Event = 2 +} diff --git a/src/OpenNetty/OpenNettySetting.cs b/src/OpenNetty/OpenNettySetting.cs new file mode 100644 index 0000000..e5dd0b0 --- /dev/null +++ b/src/OpenNetty/OpenNettySetting.cs @@ -0,0 +1,54 @@ +namespace OpenNetty; + +/// +/// Represents a setting attached to an OpenNetty endpoint, device or unit. +/// +public readonly struct OpenNettySetting : IEquatable +{ + /// + /// Creates a new instance of the structure. + /// + /// The setting name. + public OpenNettySetting(string name) + { + ArgumentException.ThrowIfNullOrEmpty(name); + + Name = name; + } + + /// + /// Gets the name associated with the setting. + /// + public string Name { get; } + + /// + public bool Equals(OpenNettySetting other) => string.Equals(Name, other.Name, StringComparison.OrdinalIgnoreCase); + + /// + public override bool Equals(object? obj) => obj is OpenNettySetting setting && Equals(setting); + + /// + public override int GetHashCode() => Name?.GetHashCode() ?? 0; + + /// + /// Computes the representation of the current setting. + /// + /// The representation of the current setting. + public override string ToString() => Name?.ToString() ?? string.Empty; + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. 
+ public static bool operator ==(OpenNettySetting left, OpenNettySetting right) => left.Equals(right); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. + public static bool operator !=(OpenNettySetting left, OpenNettySetting right) => !(left == right); +} diff --git a/src/OpenNetty/OpenNettySettings.cs b/src/OpenNetty/OpenNettySettings.cs new file mode 100644 index 0000000..f2921e9 --- /dev/null +++ b/src/OpenNetty/OpenNettySettings.cs @@ -0,0 +1,37 @@ +namespace OpenNetty; + +/// +/// Exposes common settings supported by OpenNetty. +/// +public static class OpenNettySettings +{ + /// + /// Action validation (Nitoo only). + /// + public static readonly OpenNettySetting ActionValidation = new("Action validation"); + + /// + /// Serial port baud rate. + /// + public static readonly OpenNettySetting SerialPortBaudRate = new("Serial port baud rate"); + + /// + /// Serial port data bits. + /// + public static readonly OpenNettySetting SerialPortDataBits = new("Serial port data bits"); + + /// + /// Serial port parity. + /// + public static readonly OpenNettySetting SerialPortParity = new("Serial port parity"); + + /// + /// Serial port stop bits. + /// + public static readonly OpenNettySetting SerialPortStopBits = new("Serial port stop bits"); + + /// + /// Switch mode. + /// + public static readonly OpenNettySetting SwitchMode = new("Switch mode"); +} diff --git a/src/OpenNetty/OpenNettyTransaction.cs b/src/OpenNetty/OpenNettyTransaction.cs new file mode 100644 index 0000000..7fa2385 --- /dev/null +++ b/src/OpenNetty/OpenNettyTransaction.cs @@ -0,0 +1,58 @@ +using System.Diagnostics; + +namespace OpenNetty; + +/// +/// Represents an OpenNetty transaction that can be used to correlate multiple OpenNetty notifications. +/// +[DebuggerDisplay("{ToString(),nq}")] +public readonly struct OpenNettyTransaction : IEquatable +{ + /// + /// Creates a new instance of the structure. + /// + /// The GUID used to identify the transaction. + public OpenNettyTransaction(Guid identifier) => Identifier = identifier; + + /// + /// Gets the GUID that identifies the transaction. + /// + public Guid Identifier { get; } + + /// + /// Creates a new instance. + /// + /// A new instance. + public static OpenNettyTransaction Create() => new(Guid.NewGuid()); + + /// + public bool Equals(OpenNettyTransaction transaction) => Identifier == transaction.Identifier; + + /// + public override bool Equals(object? obj) => obj is OpenNettyTransaction transaction && Equals(transaction); + + /// + public override int GetHashCode() => Identifier.GetHashCode(); + + /// + /// Computes the representation of the current transaction. + /// + /// The representation of the current transaction. + public override string ToString() => Identifier.ToString(); + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettyTransaction left, OpenNettyTransaction right) => left.Equals(right); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. 
+ public static bool operator !=(OpenNettyTransaction left, OpenNettyTransaction right) => !(left == right); +} diff --git a/src/OpenNetty/OpenNettyTransmissionOptions.cs b/src/OpenNetty/OpenNettyTransmissionOptions.cs new file mode 100644 index 0000000..db0ec7e --- /dev/null +++ b/src/OpenNetty/OpenNettyTransmissionOptions.cs @@ -0,0 +1,34 @@ +namespace OpenNetty; + +/// +/// Exposes common OpenNetty transmission options that can +/// be used to control how messages are sent by a session. +/// +[Flags] +public enum OpenNettyTransmissionOptions +{ + /// + /// Default options. + /// + None = 0, + + /// + /// Do not wait for the gateway to return an ACK, BUSYACK or NACK frame. + /// + IgnoreAcknowledgementValidation = 0x01, + + /// + /// Wait for the end device to reply with a VALID ACTION or INVALID ACTION frame (Nitoo only). + /// + RequireActionValidation = 0x02, + + /// + /// Do not add an additional delay after sending the message. + /// + DisablePostSendingDelay = 0x04, + + /// + /// Prevent the message from being replayed if an error occurs while sending it. + /// + DisallowRetransmissions = 0x08 +} diff --git a/src/OpenNetty/OpenNettyUnit.cs b/src/OpenNetty/OpenNettyUnit.cs new file mode 100644 index 0000000..12ad90a --- /dev/null +++ b/src/OpenNetty/OpenNettyUnit.cs @@ -0,0 +1,81 @@ +using System.Collections.Immutable; + +namespace OpenNetty; + +/// +/// Represents an OpenNetty unit. +/// +public sealed class OpenNettyUnit : IEquatable +{ + /// + /// Gets or sets the unit definition associated with the unit. + /// + public required OpenNettyUnitDefinition Definition { get; init; } + + /// + /// Gets or sets the scenarios associated with the unit, if applicable. + /// + public ImmutableArray Scenarios { get; init; } = []; + + /// + /// Gets or sets the user-defined settings associated with the unit, if applicable. + /// + public ImmutableDictionary Settings { get; init; } = + ImmutableDictionary.Empty; + + /// + public bool Equals(OpenNettyUnit? other) + { + if (ReferenceEquals(this, other)) + { + return true; + } + + return other is not null && + Definition == other.Definition && + Scenarios.Length == other.Scenarios.Length && !Scenarios.Except(other.Scenarios).Any() && + Settings.Count == other.Settings.Count && !Settings.Except(other.Settings).Any(); + } + + /// + public override bool Equals(object? obj) => obj is OpenNettyUnit unit && Equals(unit); + + /// + public override int GetHashCode() + { + var hash = new HashCode(); + hash.Add(Definition); + + hash.Add(Scenarios.Length); + foreach (var scenario in Scenarios) + { + hash.Add(scenario); + } + + hash.Add(Settings.Count); + foreach (var (name, value) in Settings) + { + hash.Add(name); + hash.Add(value); + } + + return hash.ToHashCode(); + } + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettyUnit? left, OpenNettyUnit? right) + => ReferenceEquals(left, right) || (left is not null && right is not null && left.Equals(right)); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. + public static bool operator !=(OpenNettyUnit? left, OpenNettyUnit?
right) => !(left == right); +} diff --git a/src/OpenNetty/OpenNettyUnitDefinition.cs b/src/OpenNetty/OpenNettyUnitDefinition.cs new file mode 100644 index 0000000..d5553c3 --- /dev/null +++ b/src/OpenNetty/OpenNettyUnitDefinition.cs @@ -0,0 +1,98 @@ +using System.Collections.Immutable; + +namespace OpenNetty; + +/// +/// Represents an OpenNetty unit definition. +/// +public sealed class OpenNettyUnitDefinition : IEquatable +{ + /// + /// Gets or sets the identifier of the associated unit, if applicable (Nitoo-only). + /// + public ushort? AssociatedUnitId { get; init; } + + /// + /// Gets or sets the capabilities associated with the unit definition. + /// + public required ImmutableHashSet Capabilities { get; init; } = []; + + /// + /// Gets or sets the identifier of the unit. + /// + public required ushort Id { get; init; } + + /// + /// Gets or sets the OpenNetty-defined settings associated with the unit definition. + /// + public ImmutableDictionary Settings { get; init; } = + ImmutableDictionary.Empty; + + /// + public bool Equals(OpenNettyUnitDefinition? other) + { + if (ReferenceEquals(this, other)) + { + return true; + } + + return other is not null && + AssociatedUnitId == other.AssociatedUnitId && + Capabilities.Count == other.Capabilities.Count && Capabilities.Except(other.Capabilities).IsEmpty && + Id == other.Id && + Settings.Count == other.Settings.Count && !Settings.Except(other.Settings).Any(); + } + + /// + public override bool Equals(object? obj) => obj is OpenNettyUnitDefinition definition && Equals(definition); + + /// + public override int GetHashCode() + { + var hash = new HashCode(); + hash.Add(AssociatedUnitId); + + hash.Add(Capabilities.Count); + foreach (var capability in Capabilities) + { + hash.Add(capability); + } + + hash.Add(Id); + + hash.Add(Settings.Count); + foreach (var (name, value) in Settings) + { + hash.Add(name); + hash.Add(value); + } + + return hash.ToHashCode(); + } + + /// + /// Determines whether the unit has the specified capability. + /// + /// The capability name. + /// + /// if the unit has the specified capability, otherwise. + /// + public bool HasCapability(OpenNettyCapability capability) => Capabilities.Contains(capability); + + /// + /// Determines whether two instances are equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are equal, otherwise. + public static bool operator ==(OpenNettyUnitDefinition? left, OpenNettyUnitDefinition? right) + => ReferenceEquals(left, right) || (left is not null && right is not null && left.Equals(right)); + + /// + /// Determines whether two instances are not equal. + /// + /// The first instance. + /// The second instance. + /// if the two instances are not equal, otherwise. + public static bool operator !=(OpenNettyUnitDefinition? left, OpenNettyUnitDefinition? right) => !(left == right); +} diff --git a/src/OpenNetty/OpenNettyWorker.cs b/src/OpenNetty/OpenNettyWorker.cs new file mode 100644 index 0000000..d6e0715 --- /dev/null +++ b/src/OpenNetty/OpenNettyWorker.cs @@ -0,0 +1,318 @@ +using System.Diagnostics; +using System.Runtime.CompilerServices; +using System.Threading.Channels; +using Polly; + +namespace OpenNetty; + +/// +/// Represents a worker responsible for processing incoming and outgoing notifications. +/// +public class OpenNettyWorker : IOpenNettyWorker +{ + private readonly OpenNettyLogger _logger; + + /// + /// Creates a new instance of the class. + /// + /// The OpenNetty logger. 
+ public OpenNettyWorker(OpenNettyLogger logger) + => _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + + /// + public Task ProcessNotificationsAsync( + OpenNettyGateway gateway, + ChannelReader reader, + ChannelWriter writer, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(gateway); + ArgumentNullException.ThrowIfNull(reader); + ArgumentNullException.ThrowIfNull(writer); + + _logger.WorkerStarting(gateway); + + List tasks = []; + + if (gateway.Device.Definition.Capabilities.Contains(OpenNettyCapabilities.OpenWebNetGenericSession)) + { + tasks.Add(CreateSharedSessionWorkerAsync(gateway, OpenNettySessionType.Generic, cancellationToken)); + } + + if (gateway.Device.Definition.Capabilities.Contains(OpenNettyCapabilities.OpenWebNetEventSession)) + { + tasks.Add(CreateSharedSessionWorkerAsync(gateway, OpenNettySessionType.Event, cancellationToken)); + } + + if (gateway.Device.Definition.Capabilities.Contains(OpenNettyCapabilities.OpenWebNetCommandSession)) + { + for (var index = 0; index < gateway.Options.MaximumConcurrentCommandSessions; index++) + { + tasks.Add(CreateAdHocSessionWorkerAsync(gateway, OpenNettySessionType.Command, + gateway.Options.CommandSessionMaximumLifetime, cancellationToken)); + } + } + + _logger.WorkerStarted(gateway); + + return Task.WhenAll(tasks); + + async Task CreateSharedSessionWorkerAsync(OpenNettyGateway gateway, OpenNettySessionType type, CancellationToken cancellationToken) + { + var context = ResilienceContextPool.Shared.Get(cancellationToken); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyGateway)), gateway); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettySessionType)), type); + + _logger.TaskRunnerScheduled(gateway, type); + + await gateway.Options.SessionResiliencePipeline.ExecuteAsync(async context => + { + using var source = CancellationTokenSource.CreateLinkedTokenSource(context.CancellationToken); + await using var session = await OpenNettySession.CreateAsync(gateway, type, source.Token); + + _logger.SessionOpen(gateway, type, session); + + try + { + await using (await session.SubscribeAsync( + async message => + { + _logger.MessageReceived(message, gateway, session); + + await writer.WriteAsync(new OpenNettyNotifications.MessageReceived + { + Gateway = gateway, + Message = message, + Session = session + }); + }, + async exception => await source.CancelAsync(), + async () => await source.CancelAsync())) + + await using (await session.ConnectAsync()) + { + if (type is OpenNettySessionType.Generic) + { + await foreach (var notification in reader.ReadAllAsync(source.Token)) + { + if (notification is OpenNettyNotifications.MessageReady + { + Message : OpenNettyMessage message, + Options : OpenNettyTransmissionOptions options, + Transaction: OpenNettyTransaction transaction + }) + { + await SendMessageAsync(gateway, session, message, options, transaction, source.Token); + } + } + } + + else + { + await WaitCancellationAsync(source.Token); + } + } + + _logger.SessionClosed(session); + } + + catch (OperationCanceledException) when (source.Token.IsCancellationRequested) + { + _logger.SessionClosed(session); + } + }, context); + } + + async Task CreateAdHocSessionWorkerAsync( + OpenNettyGateway gateway, OpenNettySessionType type, TimeSpan timeout, CancellationToken cancellationToken) + { + var context = ResilienceContextPool.Shared.Get(cancellationToken); + context.Properties.Set(new ResiliencePropertyKey(nameof(OpenNettyGateway)), gateway); + context.Properties.Set(new 
ResiliencePropertyKey(nameof(OpenNettySessionType)), type); + + _logger.TaskRunnerScheduled(gateway, type); + + await gateway.Options.SessionResiliencePipeline.ExecuteAsync(async context => + { + // Wait until a new notification is ready to be processed. + while (await reader.WaitToReadAsync(context.CancellationToken)) + { + if (!reader.TryRead(out OpenNettyNotification? notification) || + notification is not OpenNettyNotifications.MessageReady) + { + continue; + } + + using var source = CancellationTokenSource.CreateLinkedTokenSource(context.CancellationToken); + await using var session = await OpenNettySession.CreateAsync(gateway, type, source.Token); + + _logger.SessionOpen(gateway, type, session); + + try + { + await using var subscription = await session.SubscribeAsync( + async message => + { + _logger.MessageReceived(message, gateway, session); + + await writer.WriteAsync(new OpenNettyNotifications.MessageReceived + { + Gateway = gateway, + Message = message, + Session = session + }); + }, + async exception => await source.CancelAsync(), + async () => await source.CancelAsync()); + + await using var connection = await session.ConnectAsync(); + var stopwatch = Stopwatch.StartNew(); + + do + { + // If a message is ready to be sent, send it immediately. + if (notification is OpenNettyNotifications.MessageReady + { + Message : OpenNettyMessage message, + Options : OpenNettyTransmissionOptions options, + Transaction: OpenNettyTransaction transaction + }) + { + await SendMessageAsync(gateway, session, message, options, transaction, context.CancellationToken); + + // Reset the stopwatch after successfully sending a message. + stopwatch.Restart(); + } + + // Otherwise, wait for a new notification to be published: if no notification is published + // within a short period of time, exit the loop to re-evaluate whether the session should be + // closed by OpenNetty for inactivity before it is terminated by the OpenWebNet gateway itself. + else + { + await await Task.WhenAny( + WaitCancellationAsync(source.Token), + reader.WaitToReadAsync(context.CancellationToken).AsTask(), + Task.Delay(TimeSpan.FromSeconds(1), context.CancellationToken)); + } + } + + // If an additional message is ready to be sent, re-use the current session + // to send it. Otherwise, stop iterating so that the session can be closed. 
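+ // Note: the stopwatch is restarted after each processed message, so the ad-hoc command session
+ // is only closed once no message has been processed for longer than the configured maximum lifetime.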
+ while (reader.TryRead(out notification) || stopwatch.Elapsed < timeout); + + _logger.SessionClosed(session); + } + + catch (OperationCanceledException) when (source.Token.IsCancellationRequested) + { + _logger.SessionClosed(session); + } + } + }, context); + } + + async Task SendMessageAsync( + OpenNettyGateway gateway, OpenNettySession session, OpenNettyMessage message, + OpenNettyTransmissionOptions options, OpenNettyTransaction transaction, CancellationToken cancellationToken) + { + try + { + await session.SendAsync(message, options, cancellationToken); + } + + catch (OpenNettyException exception) when (exception.ErrorCode is OpenNettyErrorCode.GatewayBusy) + { + _logger.GatewayBusy(message, gateway, session); + + await writer.WriteAsync(cancellationToken: cancellationToken, item: new OpenNettyNotifications.GatewayBusy + { + Gateway = gateway, + Message = message, + Session = session, + Transaction = transaction + }); + + return; + } + + catch (OpenNettyException exception) when (exception.ErrorCode is OpenNettyErrorCode.InvalidAction) + { + _logger.InvalidAction(message, gateway, session); + + await writer.WriteAsync(cancellationToken: cancellationToken, item: new OpenNettyNotifications.InvalidAction + { + Gateway = gateway, + Message = message, + Session = session, + Transaction = transaction + }); + + return; + } + + catch (OpenNettyException exception) when (exception.ErrorCode is OpenNettyErrorCode.NoActionReceived) + { + _logger.NoActionReceived(message, gateway, session); + + await writer.WriteAsync(cancellationToken: cancellationToken, item: new OpenNettyNotifications.NoActionReceived + { + Gateway = gateway, + Message = message, + Session = session, + Transaction = transaction + }); + + return; + } + + catch (OpenNettyException exception) when (exception.ErrorCode is OpenNettyErrorCode.NoAcknowledgementReceived) + { + _logger.NoAcknowledgementReceived(message, gateway, session); + + await writer.WriteAsync(cancellationToken: cancellationToken, item: new OpenNettyNotifications.NoAcknowledgmentReceived + { + Gateway = gateway, + Message = message, + Session = session, + Transaction = transaction + }); + + // Note: these exceptions may indicate the session is stale and are re-thrown to ensure it is discarded. + throw; + } + + catch (OpenNettyException exception) when (exception.ErrorCode is OpenNettyErrorCode.InvalidFrame) + { + _logger.InvalidFrame(message, gateway, session); + + await writer.WriteAsync(cancellationToken: cancellationToken, item: new OpenNettyNotifications.InvalidFrame + { + Gateway = gateway, + Message = message, + Session = session, + Transaction = transaction + }); + + return; + } + + _logger.MessageSent(message, gateway, session); + + await writer.WriteAsync(cancellationToken: cancellationToken, item: new OpenNettyNotifications.MessageSent + { + Gateway = gateway, + Message = message, + Session = session, + Transaction = transaction + }); + } + + [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)] + static async Task WaitCancellationAsync(CancellationToken cancellationToken) + { + var source = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + using var registration = cancellationToken.Register(static state => ((TaskCompletionSource) state!).SetResult(), source); + await source.Task; + } + } +}
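For reference, the digit-driven password obfuscation used by the legacy OPEN authentication above can be exercised in isolation. The following minimal, self-contained C# sketch mirrors the transformations performed in the session code; the OpenPasswordDemo name and the sample nonce are purely illustrative and are not part of this patch.

using System.Globalization;
using System.Linq;

static class OpenPasswordDemo
{
    // Mirrors the switch used when negotiating the legacy OPEN authentication: each nonce digit
    // applies a bitwise transformation (mostly 32-bit rotations) to the numeric password; any
    // other digit leaves the intermediate value unchanged.
    public static string Obfuscate(uint password, string nonce)
        => nonce.Aggregate(password, static (value, character) => character switch
        {
            '1' => (value >> 7) | (value << 25),
            '2' => (value >> 4) | (value << 28),
            '3' => (value >> 3) | (value << 29),
            '4' => (value << 1) | (value >> 31),
            '5' => (value << 5) | (value >> 27),
            '6' => (value << 12) | (value >> 20),
            '7' => (value & 0x0000FF00) | (value << 24) | (value & 0x00FF0000) >> 16 | (value & 0xFF000000) >> 8,
            '8' => (value << 16) | (value >> 24) | ((value & 0x00FF0000) >> 8),
            '9' => ~value,
            _ => value
        }).ToString(CultureInfo.InvariantCulture);
}

// Example usage (hypothetical nonce taken from a *#<nonce>## challenge frame):
// var response = OpenPasswordDemo.Obfuscate(12345, "603356072");

As in the session code, the computed value is returned to the gateway as the decimal parameter of a *#<result>## frame, after which the gateway is expected to reply with an ACK or NACK frame.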